From 443e6135a67b426c0ecc8cd3f264d5fd338f8500 Mon Sep 17 00:00:00 2001 From: l30039603 Date: Wed, 21 Dec 2022 19:52:34 +0800 Subject: [PATCH] fastcheck schema --- README.md | 7 + contrib/dolphin/Makefile | 2 +- .../alter_function_test/alter_function.out | 17 +- .../alter_function_test/alter_procedure.out | 11 +- .../alter_function_test/language_sql.out | 21 +- .../dolphin/expected/ansi_quotes_start.out | 3 +- contrib/dolphin/expected/ansi_quotes_test.out | 31 +- contrib/dolphin/expected/any_value_test.out | 31 +- contrib/dolphin/expected/ast.out | 10 +- contrib/dolphin/expected/b_comments.out | 8 +- contrib/dolphin/expected/b_do_statment.out | 9 +- contrib/dolphin/expected/bit_count.out | 10 +- .../b_compatibility_time_funcs.out | 14 +- .../b_compatibility_time_funcs2.out | 11 +- .../b_compatibility_time_funcs3.out | 11 +- .../expected/builtin_funcs/between.out | 11 +- .../dolphin/expected/builtin_funcs/bin.out | 10 +- .../dolphin/expected/builtin_funcs/cast.out | 10 +- .../dolphin/expected/builtin_funcs/char.out | 10 +- .../expected/builtin_funcs/char_length.out | 10 +- .../builtin_funcs/character_length.out | 10 +- .../dolphin/expected/builtin_funcs/conv.out | 10 +- .../expected/builtin_funcs/convert.out | 10 +- .../dolphin/expected/builtin_funcs/crc32.out | 10 +- .../expected/builtin_funcs/db_b_format.out | 10 +- .../expected/builtin_funcs/db_b_hex.out | 11 +- .../expected/builtin_funcs/db_b_if.out | 13 +- .../dolphin/expected/builtin_funcs/elt.out | 10 +- .../dolphin/expected/builtin_funcs/field.out | 10 +- .../expected/builtin_funcs/find_in_set.out | 10 +- .../expected/builtin_funcs/make_set.out | 10 +- .../expected/builtin_funcs/not_between.out | 10 +- .../expected/builtin_funcs/soundex.out | 10 +- .../dolphin/expected/builtin_funcs/space.out | 10 +- contrib/dolphin/expected/connection_id.out | 10 +- contrib/dolphin/expected/conv_cast_test.out | 12 +- .../create_function_test/call_function.out | 16 +- .../create_function_test/commentsharp.out | 19 +- 
.../create_function_test/deterministic.out | 15 +- .../create_function_test/language_sql.out | 22 +- .../m_type_create_proc.out | 29 +- .../create_function_test/single_line_proc.out | 27 +- .../create_function_test/sql_options.out | 15 +- .../create_function_test/sql_security.out | 15 +- .../expected/db_b_date_time_functions.out | 11 +- .../expected/db_b_date_time_functions2.out | 11 +- .../expected/db_b_date_time_functions3.out | 11 +- .../expected/db_b_date_time_functions4.out | 11 +- .../dolphin/expected/db_b_new_gram_test.out | 104 +++--- contrib/dolphin/expected/db_b_parser1.out | 10 +- contrib/dolphin/expected/db_b_parser2.out | 10 +- contrib/dolphin/expected/db_b_parser3.out | 13 +- contrib/dolphin/expected/db_b_parser4.out | 19 +- .../dolphin/expected/db_b_plpgsql_test.out | 16 +- .../expected/db_b_rename_user_test.out | 10 +- contrib/dolphin/expected/default_guc.out | 10 +- contrib/dolphin/expected/describe.out | 16 +- .../dolphin/expected/empty_value_lists.out | 57 +++- .../expected/empty_value_support_value.out | 11 +- contrib/dolphin/expected/explain_desc.out | 10 +- contrib/dolphin/expected/export_set.out | 10 +- .../float_numeric_test/db_b_log_test.out | 16 +- .../float_numeric_test/db_b_sqrt_test.out | 16 +- contrib/dolphin/expected/flush.out | 10 +- contrib/dolphin/expected/get_b_database.out | 12 +- contrib/dolphin/expected/greatest_least.out | 10 +- .../dolphin/expected/group_concat_test.out | 13 +- .../dolphin/expected/if_not_exists_test.out | 10 +- contrib/dolphin/expected/implicit_cast.out | 10 +- contrib/dolphin/expected/insert_set.out | 19 +- contrib/dolphin/expected/join_without_on.out | 15 +- contrib/dolphin/expected/json_array.out | 10 +- .../dolphin/expected/json_array_append.out | 10 +- .../dolphin/expected/json_array_insert.out | 10 +- contrib/dolphin/expected/json_arrayagg.out | 12 +- contrib/dolphin/expected/json_contains.out | 10 +- .../dolphin/expected/json_contains_path.out | 10 +- contrib/dolphin/expected/json_depth.out | 10 +- 
contrib/dolphin/expected/json_extract.out | 10 +- contrib/dolphin/expected/json_insert.out | 10 +- contrib/dolphin/expected/json_keys.out | 11 +- contrib/dolphin/expected/json_length.out | 10 +- contrib/dolphin/expected/json_merge_patch.out | 13 +- .../dolphin/expected/json_merge_preserve.out | 13 +- contrib/dolphin/expected/json_object.out | 13 +- contrib/dolphin/expected/json_objectagg.out | 11 +- contrib/dolphin/expected/json_operator.out | 13 +- contrib/dolphin/expected/json_pretty.out | 10 +- contrib/dolphin/expected/json_quote.out | 10 +- contrib/dolphin/expected/json_remove.out | 13 +- contrib/dolphin/expected/json_replace.out | 10 +- contrib/dolphin/expected/json_search.out | 11 +- contrib/dolphin/expected/json_set.out | 10 +- .../dolphin/expected/json_storage_size.out | 11 +- contrib/dolphin/expected/json_type.out | 11 +- contrib/dolphin/expected/json_unquote.out | 10 +- contrib/dolphin/expected/json_valid.out | 10 +- .../ignore_invalid_input.out | 38 ++- .../ignore_no_matched_partition.out | 9 +- .../ignore_not_null_constraints.out | 42 ++- .../ignore_type_transform.out | 23 +- .../ignore_unique_constraints.out | 13 +- contrib/dolphin/expected/kill.out | 10 +- .../dolphin/expected/like_default_test.out | 245 +++++++-------- .../dolphin/expected/mysqlmode_fullgroup.out | 11 +- contrib/dolphin/expected/mysqlmode_strict.out | 62 +++- .../dolphin/expected/mysqlmode_strict2.out | 62 +++- contrib/dolphin/expected/network.out | 11 +- contrib/dolphin/expected/network2.out | 10 +- .../dolphin/expected/none_strict_warning.out | 21 +- contrib/dolphin/expected/nvarchar.out | 12 +- contrib/dolphin/expected/oct.out | 10 +- contrib/dolphin/expected/option.out | 13 +- .../expected/partition_maxvalue_test.out | 14 +- contrib/dolphin/expected/partition_test1.out | 22 +- contrib/dolphin/expected/partition_test2.out | 19 +- contrib/dolphin/expected/partition_test3.out | 15 +- contrib/dolphin/expected/partition_test4.out | 40 ++- .../dolphin/expected/pl_debugger_client.out | 
2 +- .../dolphin/expected/pl_debugger_server.out | 2 +- .../dolphin/expected/read_only_guc_test.out | 12 +- contrib/dolphin/expected/regexp.out | 10 +- .../dolphin/expected/replace_test/replace.out | 17 +- .../dolphin/expected/second_microsecond.out | 8 +- contrib/dolphin/expected/set_password.out | 11 +- contrib/dolphin/expected/show.out | 8 +- contrib/dolphin/expected/show_create.out | 10 +- .../dolphin/expected/show_create_database.out | 10 +- contrib/dolphin/expected/show_variables.out | 10 +- .../dolphin/expected/signed_unsigned_cast.out | 10 +- .../dolphin/expected/single_line_trigger.out | 10 +- .../string_func_test/db_b_ascii_test.out | 16 +- .../db_b_from_base64_test.out | 11 +- .../string_func_test/db_b_insert_test.out | 10 +- .../string_func_test/db_b_left_right_test.out | 16 +- .../string_func_test/db_b_ord_test.out | 11 +- .../string_func_test/db_b_quote_test.out | 10 +- .../db_b_string_length_test.out | 16 +- .../string_func_test/db_b_substr_test.out | 20 +- .../string_func_test/db_b_to_base64_test.out | 11 +- .../string_func_test/db_b_trim_test.out | 16 +- .../string_func_test/db_b_unhex_test.out | 11 +- .../string_func_test/test_substring_index.out | 10 +- contrib/dolphin/expected/test_alter_table.out | 36 +-- contrib/dolphin/expected/test_binary.out | 13 +- contrib/dolphin/expected/test_bit_xor.out | 17 +- contrib/dolphin/expected/test_blob.out | 10 +- contrib/dolphin/expected/test_condition.out | 13 +- .../dolphin/expected/test_current_user.out | 10 +- contrib/dolphin/expected/test_datatype.out | 12 +- contrib/dolphin/expected/test_fixed.out | 12 +- ..._float_double_real_double_precision_MD.out | 17 +- contrib/dolphin/expected/test_mysql_char.out | 10 +- contrib/dolphin/expected/test_mysql_enum.out | 19 +- .../dolphin/expected/test_mysql_operator.out | 30 +- .../dolphin/expected/test_mysql_prepare.out | 170 +++++----- contrib/dolphin/expected/test_op_blob.out | 10 +- .../expected/test_op_xor_boolandfloat.out | 10 +- 
.../expected/test_op_xor_unsignedint.out | 10 +- contrib/dolphin/expected/test_optimize.out | 10 +- contrib/dolphin/expected/test_schema.out | 16 +- contrib/dolphin/expected/test_set_charset.out | 10 +- contrib/dolphin/expected/test_shows.out | 10 +- contrib/dolphin/expected/test_shows_3.out | 16 +- contrib/dolphin/expected/test_shows_4.out | 10 +- contrib/dolphin/expected/test_shows_5.out | 21 +- contrib/dolphin/expected/test_system_user.out | 10 +- contrib/dolphin/expected/test_table_index.out | 95 +++--- contrib/dolphin/expected/tinyint_agg.out | 90 +++--- contrib/dolphin/expected/tinyint_cast.out | 11 +- contrib/dolphin/expected/tinyint_index.out | 63 ++-- contrib/dolphin/expected/tinyint_operator.out | 10 +- .../dolphin/expected/tinyint_partition.out | 20 +- .../expected/tinyint_smp_join_procedure.out | 89 +++--- contrib/dolphin/expected/uint_agg.out | 60 ++-- contrib/dolphin/expected/uint_and.out | 10 +- .../dolphin/expected/uint_auto_increment.out | 28 +- contrib/dolphin/expected/uint_cast.out | 10 +- contrib/dolphin/expected/uint_cast2.out | 11 +- contrib/dolphin/expected/uint_div.out | 10 +- contrib/dolphin/expected/uint_ignore.out | 11 +- contrib/dolphin/expected/uint_in.out | 15 +- contrib/dolphin/expected/uint_index.out | 295 +++++++++--------- contrib/dolphin/expected/uint_join.out | 13 +- contrib/dolphin/expected/uint_mi.out | 10 +- contrib/dolphin/expected/uint_mod.out | 10 +- contrib/dolphin/expected/uint_mod2.out | 10 +- contrib/dolphin/expected/uint_mul.out | 10 +- contrib/dolphin/expected/uint_numeric.out | 16 +- contrib/dolphin/expected/uint_operator.out | 10 +- contrib/dolphin/expected/uint_or.out | 10 +- contrib/dolphin/expected/uint_partition.out | 23 +- contrib/dolphin/expected/uint_pl.out | 10 +- .../expected/uint_procedure_col_bypass.out | 38 +-- contrib/dolphin/expected/uint_smp.out | 210 +++++++------ contrib/dolphin/expected/uint_sql_mode.out | 11 +- contrib/dolphin/expected/uint_xor.out | 10 +- contrib/dolphin/expected/upsert.out | 33 
+- contrib/dolphin/expected/use_dbname.out | 10 +- contrib/dolphin/expected/vec_engine.out | 15 +- contrib/dolphin/expected/zerofill.out | 13 +- contrib/dolphin/parallel_schedule_dolphin | 60 +--- .../alter_function_test/alter_function.sql | 9 +- .../alter_function_test/alter_procedure.sql | 9 +- .../sql/alter_function_test/language_sql.sql | 9 +- contrib/dolphin/sql/ansi_quotes_start.sql | 3 +- contrib/dolphin/sql/ansi_quotes_test.sql | 6 +- contrib/dolphin/sql/any_value_test.sql | 9 +- contrib/dolphin/sql/ast.sql | 9 +- contrib/dolphin/sql/b_comments.sql | 11 +- contrib/dolphin/sql/b_do_statment.sql | 8 +- contrib/dolphin/sql/bit_count.sql | 9 +- .../b_compatibility_time_funcs.sql | 12 +- .../b_compatibility_time_funcs2.sql | 9 +- .../b_compatibility_time_funcs3.sql | 9 +- contrib/dolphin/sql/builtin_funcs/between.sql | 9 +- contrib/dolphin/sql/builtin_funcs/bin.sql | 9 +- contrib/dolphin/sql/builtin_funcs/cast.sql | 9 +- contrib/dolphin/sql/builtin_funcs/char.sql | 9 +- .../dolphin/sql/builtin_funcs/char_length.sql | 9 +- .../sql/builtin_funcs/character_length.sql | 9 +- contrib/dolphin/sql/builtin_funcs/conv.sql | 9 +- contrib/dolphin/sql/builtin_funcs/convert.sql | 9 +- contrib/dolphin/sql/builtin_funcs/crc32.sql | 9 +- .../dolphin/sql/builtin_funcs/db_b_format.sql | 9 +- .../dolphin/sql/builtin_funcs/db_b_hex.sql | 9 +- contrib/dolphin/sql/builtin_funcs/db_b_if.sql | 10 +- contrib/dolphin/sql/builtin_funcs/elt.sql | 9 +- contrib/dolphin/sql/builtin_funcs/field.sql | 9 +- .../dolphin/sql/builtin_funcs/find_in_set.sql | 9 +- .../dolphin/sql/builtin_funcs/make_set.sql | 9 +- .../dolphin/sql/builtin_funcs/not_between.sql | 9 +- contrib/dolphin/sql/builtin_funcs/soundex.sql | 9 +- contrib/dolphin/sql/builtin_funcs/space.sql | 9 +- contrib/dolphin/sql/connection_id.sql | 9 +- contrib/dolphin/sql/conv_cast_test.sql | 11 +- .../create_function_test/call_function.sql | 10 +- .../sql/create_function_test/commentsharp.sql | 10 +- 
.../create_function_test/deterministic.sql | 9 +- .../sql/create_function_test/language_sql.sql | 9 +- .../m_type_create_proc.sql | 10 +- .../create_function_test/single_line_proc.sql | 11 +- .../sql/create_function_test/sql_options.sql | 10 +- .../sql/create_function_test/sql_security.sql | 9 +- .../dolphin/sql/db_b_date_time_functions.sql | 10 +- .../dolphin/sql/db_b_date_time_functions2.sql | 10 +- .../dolphin/sql/db_b_date_time_functions3.sql | 10 +- .../dolphin/sql/db_b_date_time_functions4.sql | 10 +- contrib/dolphin/sql/db_b_new_gram_test.sql | 16 +- contrib/dolphin/sql/db_b_parser1.sql | 9 +- contrib/dolphin/sql/db_b_parser2.sql | 9 +- contrib/dolphin/sql/db_b_parser3.sql | 9 +- contrib/dolphin/sql/db_b_parser4.sql | 9 +- contrib/dolphin/sql/db_b_plpgsql_test.sql | 9 +- contrib/dolphin/sql/db_b_rename_user_test.sql | 9 +- contrib/dolphin/sql/default_guc.sql | 9 +- contrib/dolphin/sql/describe.sql | 11 +- contrib/dolphin/sql/empty_value_lists.sql | 11 +- .../dolphin/sql/empty_value_support_value.sql | 10 +- contrib/dolphin/sql/explain_desc.sql | 11 +- contrib/dolphin/sql/export_set.sql | 9 +- .../db_b_float_round_test.sql | 15 +- .../sql/float_numeric_test/db_b_log_test.sql | 15 +- .../sql/float_numeric_test/db_b_sqrt_test.sql | 16 +- contrib/dolphin/sql/flush.sql | 9 +- contrib/dolphin/sql/get_b_database.sql | 9 +- contrib/dolphin/sql/greatest_least.sql | 9 +- contrib/dolphin/sql/group_concat_test.sql | 8 +- contrib/dolphin/sql/if_not_exists_test.sql | 9 +- contrib/dolphin/sql/implicit_cast.sql | 9 +- contrib/dolphin/sql/insert_set.sql | 9 +- contrib/dolphin/sql/join_without_on.sql | 9 +- contrib/dolphin/sql/json_array.sql | 9 +- contrib/dolphin/sql/json_array_append.sql | 9 +- contrib/dolphin/sql/json_array_insert.sql | 9 +- contrib/dolphin/sql/json_arrayagg.sql | 9 +- contrib/dolphin/sql/json_contains.sql | 9 +- contrib/dolphin/sql/json_contains_path.sql | 9 +- contrib/dolphin/sql/json_depth.sql | 9 +- contrib/dolphin/sql/json_extract.sql | 9 +- 
contrib/dolphin/sql/json_insert.sql | 9 +- contrib/dolphin/sql/json_keys.sql | 9 +- contrib/dolphin/sql/json_length.sql | 9 +- contrib/dolphin/sql/json_merge_patch.sql | 9 +- contrib/dolphin/sql/json_merge_preserve.sql | 9 +- contrib/dolphin/sql/json_object.sql | 9 +- contrib/dolphin/sql/json_objectagg.sql | 9 +- contrib/dolphin/sql/json_operator.sql | 9 +- contrib/dolphin/sql/json_pretty.sql | 9 +- contrib/dolphin/sql/json_quote.sql | 9 +- contrib/dolphin/sql/json_remove.sql | 9 +- contrib/dolphin/sql/json_replace.sql | 9 +- contrib/dolphin/sql/json_search.sql | 9 +- contrib/dolphin/sql/json_set.sql | 9 +- contrib/dolphin/sql/json_storage_size.sql | 10 +- contrib/dolphin/sql/json_type.sql | 9 +- contrib/dolphin/sql/json_unquote.sql | 9 +- contrib/dolphin/sql/json_valid.sql | 9 +- .../ignore_invalid_input.sql | 8 +- .../ignore_no_matched_partition.sql | 8 +- .../ignore_not_null_constraints.sql | 8 +- .../ignore_type_transform.sql | 8 +- .../ignore_unique_constraints.sql | 8 +- contrib/dolphin/sql/kill.sql | 9 +- contrib/dolphin/sql/like_default_test.sql | 17 +- contrib/dolphin/sql/mysqlmode_fullgroup.sql | 8 +- contrib/dolphin/sql/mysqlmode_strict.sql | 10 +- contrib/dolphin/sql/mysqlmode_strict2.sql | 10 +- contrib/dolphin/sql/network.sql | 9 +- contrib/dolphin/sql/network2.sql | 9 +- contrib/dolphin/sql/none_strict_warning.sql | 11 +- contrib/dolphin/sql/nvarchar.sql | 11 +- contrib/dolphin/sql/oct.sql | 9 +- contrib/dolphin/sql/option.sql | 10 +- .../dolphin/sql/partition_maxvalue_test.sql | 9 +- contrib/dolphin/sql/partition_test1.sql | 16 +- contrib/dolphin/sql/partition_test2.sql | 10 +- contrib/dolphin/sql/partition_test3.sql | 10 +- contrib/dolphin/sql/partition_test4.sql | 9 +- contrib/dolphin/sql/pl_debugger_client.sql | 2 +- contrib/dolphin/sql/pl_debugger_server.sql | 2 +- contrib/dolphin/sql/read_only_guc_test.sql | 11 +- contrib/dolphin/sql/regexp.sql | 9 +- contrib/dolphin/sql/replace_test/replace.sql | 9 +- contrib/dolphin/sql/second_microsecond.sql 
| 8 +- contrib/dolphin/sql/set_password.sql | 12 +- contrib/dolphin/sql/show.sql | 8 +- contrib/dolphin/sql/show_create.sql | 9 +- contrib/dolphin/sql/show_create_database.sql | 9 +- contrib/dolphin/sql/show_variables.sql | 9 +- contrib/dolphin/sql/signed_unsigned_cast.sql | 9 +- contrib/dolphin/sql/single_line_trigger.sql | 9 +- .../sql/string_func_test/db_b_ascii_test.sql | 15 +- .../db_b_from_base64_test.sql | 9 +- .../sql/string_func_test/db_b_insert_test.sql | 9 +- .../string_func_test/db_b_left_right_test.sql | 16 +- .../sql/string_func_test/db_b_ord_test.sql | 9 +- .../sql/string_func_test/db_b_quote_test.sql | 9 +- .../db_b_string_length_test.sql | 16 +- .../sql/string_func_test/db_b_substr_test.sql | 16 +- .../string_func_test/db_b_to_base64_test.sql | 9 +- .../sql/string_func_test/db_b_trim_test.sql | 16 +- .../sql/string_func_test/db_b_unhex_test.sql | 9 +- .../string_func_test/test_substring_index.sql | 9 +- contrib/dolphin/sql/test_alter_table.sql | 9 +- contrib/dolphin/sql/test_binary.sql | 9 +- contrib/dolphin/sql/test_bit_xor.sql | 9 +- contrib/dolphin/sql/test_blob.sql | 9 +- contrib/dolphin/sql/test_checksum.sql | 10 +- contrib/dolphin/sql/test_condition.sql | 11 +- contrib/dolphin/sql/test_current_user.sql | 9 +- contrib/dolphin/sql/test_datatype.sql | 9 +- contrib/dolphin/sql/test_fixed.sql | 9 +- ..._float_double_real_double_precision_MD.sql | 8 +- contrib/dolphin/sql/test_mysql_char.sql | 9 +- contrib/dolphin/sql/test_mysql_enum.sql | 11 +- contrib/dolphin/sql/test_mysql_operator.sql | 27 +- contrib/dolphin/sql/test_mysql_prepare.sql | 9 +- contrib/dolphin/sql/test_op_blob.sql | 9 +- .../dolphin/sql/test_op_xor_boolandfloat.sql | 9 +- .../dolphin/sql/test_op_xor_unsignedint.sql | 9 +- contrib/dolphin/sql/test_optimize.sql | 9 +- contrib/dolphin/sql/test_proxy.sql | 9 +- contrib/dolphin/sql/test_schema.sql | 9 +- contrib/dolphin/sql/test_set_charset.sql | 9 +- contrib/dolphin/sql/test_shows.sql | 9 +- contrib/dolphin/sql/test_shows_3.sql | 10 
+- contrib/dolphin/sql/test_shows_4.sql | 9 +- contrib/dolphin/sql/test_shows_5.sql | 13 +- contrib/dolphin/sql/test_system_user.sql | 9 +- contrib/dolphin/sql/test_table_index.sql | 10 +- contrib/dolphin/sql/tinyint_agg.sql | 9 +- contrib/dolphin/sql/tinyint_cast.sql | 9 +- contrib/dolphin/sql/tinyint_index.sql | 10 +- contrib/dolphin/sql/tinyint_operator.sql | 9 +- contrib/dolphin/sql/tinyint_partition.sql | 9 +- .../sql/tinyint_smp_join_procedure.sql | 9 +- contrib/dolphin/sql/uint_agg.sql | 9 +- contrib/dolphin/sql/uint_and.sql | 9 +- contrib/dolphin/sql/uint_auto_increment.sql | 10 +- contrib/dolphin/sql/uint_cast.sql | 9 +- contrib/dolphin/sql/uint_cast2.sql | 9 +- contrib/dolphin/sql/uint_cast3.sql | 9 +- contrib/dolphin/sql/uint_div.sql | 9 +- contrib/dolphin/sql/uint_ignore.sql | 9 +- contrib/dolphin/sql/uint_in.sql | 9 +- contrib/dolphin/sql/uint_index.sql | 9 +- contrib/dolphin/sql/uint_join.sql | 9 +- contrib/dolphin/sql/uint_mi.sql | 9 +- contrib/dolphin/sql/uint_mod.sql | 9 +- contrib/dolphin/sql/uint_mod2.sql | 9 +- contrib/dolphin/sql/uint_mul.sql | 9 +- contrib/dolphin/sql/uint_numeric.sql | 9 +- contrib/dolphin/sql/uint_operator.sql | 9 +- contrib/dolphin/sql/uint_or.sql | 9 +- contrib/dolphin/sql/uint_partition.sql | 9 +- contrib/dolphin/sql/uint_pl.sql | 9 +- .../dolphin/sql/uint_procedure_col_bypass.sql | 9 +- contrib/dolphin/sql/uint_smp.sql | 9 +- contrib/dolphin/sql/uint_sql_mode.sql | 9 +- contrib/dolphin/sql/uint_xor.sql | 9 +- contrib/dolphin/sql/upsert.sql | 9 +- contrib/dolphin/sql/use_dbname.sql | 9 +- contrib/dolphin/sql/vec_engine.sql | 8 +- contrib/dolphin/sql/zerofill.sql | 9 +- 405 files changed, 3088 insertions(+), 3006 deletions(-) mode change 100755 => 100644 contrib/dolphin/expected/alter_function_test/alter_function.out mode change 100755 => 100644 contrib/dolphin/expected/alter_function_test/alter_procedure.out mode change 100755 => 100644 contrib/dolphin/expected/create_function_test/call_function.out mode change 100755 => 
100644 contrib/dolphin/expected/create_function_test/deterministic.out mode change 100755 => 100644 contrib/dolphin/expected/create_function_test/language_sql.out mode change 100755 => 100644 contrib/dolphin/expected/create_function_test/sql_options.out mode change 100755 => 100644 contrib/dolphin/expected/create_function_test/sql_security.out mode change 100755 => 100644 contrib/dolphin/expected/read_only_guc_test.out mode change 100755 => 100644 contrib/dolphin/expected/replace_test/replace.out diff --git a/README.md b/README.md index 4236edfd9..73d47eee1 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,13 @@ Gitee 是 OSCHINA 推出的基于 Git 的代码托管平台(同时支持 SVN 2. 修改如涉及文档,需要同步在docs仓提交文档修改,插件相关文档入口: https://gitee.com/opengauss/docs/tree/master/content/zh/docs/Developerguide/dolphin-Extension.md 。注意添加SQL语法时,需要增加必要的示例。 3. 新增/修改的代码需要使用宏 DOLPHIN 进行控制,方便后续回合openGauss-server仓代码时,区分哪些是插件修改的代码,哪些是内核修改的代码。修改的代码通过宏的IF/ELSE分支保留原始代码。主要控制 ```.h/.cpp``` 文件, ```.y``` 文件不太好使用宏控制,可以不处理。 +### check用例编写规范 +1. check用例默认使用的数据库为contrib_regression数据库,B兼容类型。编写用例时无需自己手动创建B类型数据库。 +2. 建议通过schema的方式隔离不同用例间的结果影响。可参考现有用例的写法。 +3. 单个用例执行时间不宜太长,建议不超过10s,超过的应当考虑优化用例或进行拆分。 +4. 非必要不新增测试组,一个测试组可允许5~10个用例一起并行执行。 +5. 对于SELECT语句强烈建议增加order by子句,保证SELECT语句查询结果稳定。 + #### 特技 1. 
使用 Readme\_XXX.md 来支持不同的语言,例如 Readme\_en.md, Readme\_zh.md diff --git a/contrib/dolphin/Makefile b/contrib/dolphin/Makefile index 2bd6f1571..c31dd323b 100644 --- a/contrib/dolphin/Makefile +++ b/contrib/dolphin/Makefile @@ -132,7 +132,7 @@ ifdef MJDBC_TEST REGRESS+=b_proto_jdbc endif -REGRESS_OPTS = --dlpath=$(top_builddir)/src/test/regress -c 0 -d 1 --single_node -p ${p} --schedule=./parallel_schedule_dolphin${PART} --regconf=regress.conf -r 1 -n --keep_last_data=false --temp-config=./make_check_postgresql.conf +REGRESS_OPTS = --dlpath=$(top_builddir)/src/test/regress -c 0 -d 1 --single_node -p ${p} --schedule=./parallel_schedule_dolphin${PART} --regconf=regress.conf -r 1 -n --keep_last_data=false --temp-config=./make_check_postgresql.conf --dbcmpt=B export dp = $(shell expr $(p) + 3) export THIRD_PARTY_LIBS = $(with_3rd) diff --git a/contrib/dolphin/expected/alter_function_test/alter_function.out b/contrib/dolphin/expected/alter_function_test/alter_function.out old mode 100755 new mode 100644 index f1ebb93a8..3a256079c --- a/contrib/dolphin/expected/alter_function_test/alter_function.out +++ b/contrib/dolphin/expected/alter_function_test/alter_function.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_func_1; -NOTICE: database "db_alter_func_1" does not exist, skipping -create database db_alter_func_1 dbcompatibility 'B'; -\c db_alter_func_1 +create schema db_alter_func_1; +set current_schema to 'db_alter_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; ALTER FUNCTION f1 (s char(20)) NO SQL; ALTER FUNCTION f1 (s char(20)) CONTAINS SQL; @@ -103,5 +101,12 @@ select * from t1; 1 (3 rows) -\c postgres -drop database db_alter_func_1; +drop schema db_alter_func_1 cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f3() +drop cascades to function f5(integer) +drop cascades to function f5(real) +drop cascades to function f5() +drop cascades to table t1 +reset 
current_schema; diff --git a/contrib/dolphin/expected/alter_function_test/alter_procedure.out b/contrib/dolphin/expected/alter_function_test/alter_procedure.out old mode 100755 new mode 100644 index 6fa9c5944..9b33face2 --- a/contrib/dolphin/expected/alter_function_test/alter_procedure.out +++ b/contrib/dolphin/expected/alter_function_test/alter_procedure.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_func_2; -NOTICE: database "db_alter_func_2" does not exist, skipping -create database db_alter_func_2 dbcompatibility 'B'; -\c db_alter_func_2 +create schema db_alter_func_2; +set current_schema to 'db_alter_func_2'; CREATE OR REPLACE PROCEDURE proc1() AS BEGIN @@ -64,5 +62,6 @@ ERROR: function "proc1" already exists with same argument types -- 修改不存在的存储过程 ALTER PROCEDURE proc2 READS SQL DATA; ERROR: function proc2 does not exist -\c postgres -drop database db_alter_func_2; +drop schema db_alter_func_2 cascade; +NOTICE: drop cascades to function proc1() +reset current_schema; diff --git a/contrib/dolphin/expected/alter_function_test/language_sql.out b/contrib/dolphin/expected/alter_function_test/language_sql.out index ed911fc79..99635ead6 100644 --- a/contrib/dolphin/expected/alter_function_test/language_sql.out +++ b/contrib/dolphin/expected/alter_function_test/language_sql.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_func_sql; -NOTICE: database "db_alter_func_sql" does not exist, skipping -create database db_alter_func_sql dbcompatibility 'B'; -\c db_alter_func_sql +create schema db_alter_func_sql; +set current_schema to 'db_alter_func_sql'; -- test func CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ begin @@ -194,5 +192,16 @@ call pro_3(1,2,'a'); (1 row) -\c postgres -drop database db_alter_func_sql; +drop schema db_alter_func_sql cascade; +NOTICE: drop cascades to 10 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function func(integer) +drop cascades to function proc1(integer) +drop cascades to table 
language_1093039 +drop cascades to function fun_1(integer,integer,character varying) +drop cascades to function fun_2(integer,integer,character varying) +drop cascades to function fun_3(integer,integer,character varying) +drop cascades to function pro_1(integer,integer,character varying) +drop cascades to function pro_2(integer,integer,character varying) +drop cascades to function pro_3(integer,integer,character varying) +reset current_schema; diff --git a/contrib/dolphin/expected/ansi_quotes_start.out b/contrib/dolphin/expected/ansi_quotes_start.out index bde70e0aa..8855fe711 100644 --- a/contrib/dolphin/expected/ansi_quotes_start.out +++ b/contrib/dolphin/expected/ansi_quotes_start.out @@ -1,2 +1 @@ -CREATE DATABASE test_ansi_quotes DBCOMPATIBILITY 'B'; -\c test_ansi_quotes +create schema test_ansi_quotes; diff --git a/contrib/dolphin/expected/ansi_quotes_test.out b/contrib/dolphin/expected/ansi_quotes_test.out index 6aa888e21..09f9a9376 100644 --- a/contrib/dolphin/expected/ansi_quotes_test.out +++ b/contrib/dolphin/expected/ansi_quotes_test.out @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; CREATE TABLE test_quotes (a text); show dolphin.sql_mode; dolphin.sql_mode @@ -67,27 +67,23 @@ desc test_quotes_2; -- test show show tables; - Tables_in_public -------------------------------- - index_statistic - pg_type_nonstrict_basic_value + Tables_in_test_ansi_quotes +---------------------------- test_quotes test_quotes_2 -(4 rows) +(2 rows) show full tables; - Tables_in_public | Table_type --------------------------------+------------ - index_statistic | VIEW - pg_type_nonstrict_basic_value | VIEW - test_quotes | BASE TABLE - test_quotes_2 | BASE TABLE -(4 rows) + Tables_in_test_ansi_quotes | Table_type +----------------------------+------------ + test_quotes | BASE TABLE + test_quotes_2 | BASE TABLE +(2 rows) show create table test_quotes_2; Table | Create Table ---------------+----------------------------------------- - test_quotes_2 | 
SET search_path = public; + + test_quotes_2 | SET search_path = test_ansi_quotes; + | CREATE TABLE test_quotes_2 ( + | a text + | ) + @@ -147,5 +143,8 @@ select * from test_quotes where a = "test1"; test1 (1 row) -\c postgres -DROP DATABASE test_ansi_quotes; +drop schema test_ansi_quotes cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test_quotes +drop cascades to table test_quotes_2 +reset current_schema; diff --git a/contrib/dolphin/expected/any_value_test.out b/contrib/dolphin/expected/any_value_test.out index 4cf1549af..33f3978b7 100644 --- a/contrib/dolphin/expected/any_value_test.out +++ b/contrib/dolphin/expected/any_value_test.out @@ -1,7 +1,5 @@ -drop DATABASE if exists any_value_test; -NOTICE: database "any_value_test" does not exist, skipping -CREATE DATABASE any_value_test dbcompatibility 'B'; -\c any_value_test; +create schema any_value_test; +set current_schema to 'any_value_test'; --test int type create table test_int1(a tinyint, b int); create table test_int2(a smallint, b int); @@ -192,5 +190,26 @@ select any_value(c) from test_blob_bytea group by a; \xdabc (2 rows) -\c postgres; -drop DATABASE if exists any_value_test; +drop schema any_value_test cascade; +NOTICE: drop cascades to 20 other objects +DETAIL: drop cascades to table test_int1 +drop cascades to table test_int2 +drop cascades to table test_int4 +drop cascades to table test_int8 +drop cascades to table test_uint1 +drop cascades to table test_uint2 +drop cascades to table test_uint4 +drop cascades to table test_uint8 +drop cascades to table test_float +drop cascades to table test_double +drop cascades to table test_numeric +drop cascades to table test_char +drop cascades to table test_varchar +drop cascades to table test_text +drop cascades to table test_date +drop cascades to table test_bool +drop cascades to table test_year +drop cascades to type test_set_a_set +drop cascades to table test_set +drop cascades to table test_blob_bytea +reset 
current_schema; diff --git a/contrib/dolphin/expected/ast.out b/contrib/dolphin/expected/ast.out index 1cec80f94..b14aa1cbb 100644 --- a/contrib/dolphin/expected/ast.out +++ b/contrib/dolphin/expected/ast.out @@ -1,7 +1,5 @@ -drop database if exists ast_test; -NOTICE: database "ast_test" does not exist, skipping -create database ast_test dbcompatibility 'b'; -\c ast_test +create schema ast_test; +set current_schema to 'ast_test'; ast select * from test; ast create table test(id int); ast create table test(id int(5)); @@ -13,5 +11,5 @@ ast select 1;select 1; (1 row) ast select 1;ast select 1; -\c postgres -drop database ast_test; +drop schema ast_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/b_comments.out b/contrib/dolphin/expected/b_comments.out index ed49c110f..24c21c171 100644 --- a/contrib/dolphin/expected/b_comments.out +++ b/contrib/dolphin/expected/b_comments.out @@ -1,4 +1,6 @@ /* unsupported */ +create database b_comments dbcompatibility 'A'; +\c b_comments create schema b_comments; set search_path to 'b_comments'; create table test_row(a int not null comment 'test_row.a'); @@ -51,8 +53,8 @@ NOTICE: drop cascades to 3 other objects DETAIL: drop cascades to table test_alter drop cascades to function test_alter_function(integer) drop cascades to function test_alter_procedure(integer,integer) -create database b_comments dbcompatibility 'B'; -\c b_comments +\c contrib_regression +drop database b_comments; create schema b_comments; set search_path to 'b_comments'; /* unsupported */ @@ -267,5 +269,3 @@ drop cascades to table fvt_distribute_query_tables_02 drop cascades to table t_comment_0032 drop cascades to table t_comment_0034 reset search_path; -\c postgres -drop database b_comments; diff --git a/contrib/dolphin/expected/b_do_statment.out b/contrib/dolphin/expected/b_do_statment.out index 1f805197f..932fcfb17 100644 --- a/contrib/dolphin/expected/b_do_statment.out +++ b/contrib/dolphin/expected/b_do_statment.out @@ -1,5 +1,5 @@ 
-create database db_do_stmt dbcompatibility = 'B'; -\c db_do_stmt +create schema db_do_stmt; +set current_schema to 'db_do_stmt'; create table t1 (a int); insert into t1 values(1),(4),(7); select a from t1; @@ -57,5 +57,6 @@ do sin(a) from t1; ERROR: syntax error at or near "from" LINE 1: do sin(a) from t1; ^ -\c regress -\connect: FATAL: database "regress" does not exist +drop schema db_do_stmt cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/bit_count.out b/contrib/dolphin/expected/bit_count.out index 28bcb7c0e..939e3a3f7 100644 --- a/contrib/dolphin/expected/bit_count.out +++ b/contrib/dolphin/expected/bit_count.out @@ -1,7 +1,5 @@ -drop database if exists test_bit_count; -NOTICE: database "test_bit_count" does not exist, skipping -create database test_bit_count dbcompatibility 'b'; -\c test_bit_count +create schema test_bit_count; +set current_schema to 'test_bit_count'; -- 测试数字,字符串,二进制输入 SELECT bit_count(29); bit_count @@ -204,5 +202,5 @@ select bit_count(b'1000000011111111111111111111111111111111111111111111111111111 64 (1 row) -\c postgres -drop database test_bit_count; +drop schema test_bit_count cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out index f403d370f..5432253af 100644 --- a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out +++ b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs.out @@ -1,10 +1,5 @@ --- --- Test All Time function under 'b' compatibility --- -drop database if exists b_time_funcs; -NOTICE: database "b_time_funcs" does not exist, skipping -create database b_time_funcs dbcompatibility 'b'; -\c b_time_funcs +create schema b_time_funcs; +set current_schema to 'b_time_funcs'; create table func_test(functionName varchar(256),result varchar(256)); truncate table func_test; -- makedate() @@ -488,5 +483,6 @@ select * 
from func_test; subdate('2022-01-01 01:01:01', interval 1.999 second) | Sat Jan 01 01:00:59.001 2022 --?.* -\c postgres -drop database if exists b_time_funcs; +drop schema b_time_funcs cascade; +NOTICE: drop cascades to table func_test +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out index f3d1a682c..77e91a13d 100644 --- a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out +++ b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs2.out @@ -2,10 +2,8 @@ -- Test Time functions(Stage 2) under 'b' compatibility -- Contains subtime()、timediff()、time()、time_format()、timestamp()、timestampadd() -- -drop database if exists b_time_funcs2; -NOTICE: database "b_time_funcs2" does not exist, skipping -create database b_time_funcs2 dbcompatibility 'b'; -\c b_time_funcs2 +create schema b_time_funcs2; +set current_schema to 'b_time_funcs2'; create table func_test2(functionName varchar(256),result varchar(256)); truncate table func_test2; -- SUBTIME() @@ -1039,5 +1037,6 @@ select * from func_test2; TIMESTAMPADD(SECOND,-0.001,'2022-07-27 00:00:00') | Tue Jul 26 23:59:59.999 2022 --?.* -\c postgres -drop database if exists b_time_funcs2; +drop schema b_time_funcs2 cascade; +NOTICE: drop cascades to table func_test2 +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out index 7c440d2ad..041de9286 100644 --- a/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out +++ b/contrib/dolphin/expected/builtin_funcs/b_compatibility_time_funcs3.out @@ -2,10 +2,8 @@ -- Test Time functions(Stage 3) under 'b' compatibility -- Contains to_days(), to_seconds(), unix_timestamp(), utc_date(), utc_time()、timestampadd() -- -drop database if exists b_time_funcs3; -NOTICE: database "b_time_funcs3" does not 
exist, skipping -create database b_time_funcs3 dbcompatibility 'b'; -\c b_time_funcs3 +create schema b_time_funcs3; +set current_schema to 'b_time_funcs3'; create table func_test3(functionName varchar(256),result varchar(256)); truncate table func_test3; -- TO_DAYS() @@ -385,5 +383,6 @@ select * from func_test3; --? UTC_TIMESTAMP(6) | .* (101 rows) -\c postgres -drop database if exists b_time_funcs3; +drop schema b_time_funcs3 cascade; +NOTICE: drop cascades to table func_test3 +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/between.out b/contrib/dolphin/expected/builtin_funcs/between.out index 3190cfcf4..178482705 100644 --- a/contrib/dolphin/expected/builtin_funcs/between.out +++ b/contrib/dolphin/expected/builtin_funcs/between.out @@ -1,7 +1,5 @@ -drop database if exists db_between; -NOTICE: database "db_between" does not exist, skipping -create database db_between dbcompatibility 'B'; -\c db_between +create schema db_between; +set current_schema to 'db_between'; select 2 between 2 and 23; ?column? 
---------- @@ -361,5 +359,6 @@ select distinct c_town from t_between_and_0023 where c_town between 'b' and 'nz' (5 rows) drop table t_between_and_0023; -\c postgres -drop database if exists db_between; +drop schema db_between cascade; +NOTICE: drop cascades to table t_between_and_0007 +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/bin.out b/contrib/dolphin/expected/builtin_funcs/bin.out index c6ee11dfe..dc950ce45 100644 --- a/contrib/dolphin/expected/builtin_funcs/bin.out +++ b/contrib/dolphin/expected/builtin_funcs/bin.out @@ -1,7 +1,5 @@ -drop database if exists db_bin; -NOTICE: database "db_bin" does not exist, skipping -create database db_bin dbcompatibility 'B'; -\c db_bin +create schema db_bin; +set current_schema to 'db_bin'; select bin(1); bin ----- @@ -62,5 +60,5 @@ select bin('测试'); 0 (1 row) -\c postgres -drop database if exists db_bin; +drop schema db_bin cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/cast.out b/contrib/dolphin/expected/builtin_funcs/cast.out index 49731db01..2678b09e4 100644 --- a/contrib/dolphin/expected/builtin_funcs/cast.out +++ b/contrib/dolphin/expected/builtin_funcs/cast.out @@ -1,7 +1,5 @@ -drop database if exists db_cast; -NOTICE: database "db_cast" does not exist, skipping -create database db_cast dbcompatibility 'B'; -\c db_cast +create schema db_cast; +set current_schema to 'db_cast'; select cast('$2'::money as unsigned); uint8 ------- @@ -38,5 +36,5 @@ select cast(cast('2022-11-10 18:03:20'::timestamp as unsigned) as timestamp); Fri Nov 11 02:03:20 2022 PST (1 row) -\c postgres -drop database if exists db_cast; +drop schema db_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/char.out b/contrib/dolphin/expected/builtin_funcs/char.out index a12e79598..76ed18f04 100644 --- a/contrib/dolphin/expected/builtin_funcs/char.out +++ b/contrib/dolphin/expected/builtin_funcs/char.out @@ -1,7 +1,5 @@ -drop database if exists db_char; 
-NOTICE: database "db_char" does not exist, skipping -create database db_char dbcompatibility 'B'; -\c db_char +create schema db_char; +set current_schema to 'db_char'; select char(67,66,67); char ------ @@ -128,5 +126,5 @@ select char('hiu158','测试',125.99,146); ~ (1 row) -\c postgres -drop database if exists db_char; +drop schema db_char cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/char_length.out b/contrib/dolphin/expected/builtin_funcs/char_length.out index fba0da928..f31c7e201 100644 --- a/contrib/dolphin/expected/builtin_funcs/char_length.out +++ b/contrib/dolphin/expected/builtin_funcs/char_length.out @@ -1,7 +1,5 @@ -drop database if exists db_char_length; -NOTICE: database "db_char_length" does not exist, skipping -create database db_char_length dbcompatibility 'B'; -\c db_char_length +create schema db_char_length; +set current_schema to 'db_char_length'; select char_length(1234); char_length ------------- @@ -92,8 +90,8 @@ select char_length(true); 1 (1 row) -\c postgres -drop database if exists db_char_length; +drop schema db_char_length cascade; +reset current_schema; drop database if exists db_char_length_gbk; NOTICE: database "db_char_length_gbk" does not exist, skipping create database db_char_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; diff --git a/contrib/dolphin/expected/builtin_funcs/character_length.out b/contrib/dolphin/expected/builtin_funcs/character_length.out index 718df4253..458d01ee0 100644 --- a/contrib/dolphin/expected/builtin_funcs/character_length.out +++ b/contrib/dolphin/expected/builtin_funcs/character_length.out @@ -1,7 +1,5 @@ -drop database if exists db_character_length; -NOTICE: database "db_character_length" does not exist, skipping -create database db_character_length dbcompatibility 'B'; -\c db_character_length +create schema db_character_length; +set current_schema to 'db_character_length'; select character_length(1234); character_length 
------------------ @@ -92,8 +90,8 @@ select character_length(true); 1 (1 row) -\c postgres -drop database if exists db_character_length; +drop schema db_character_length cascade; +reset current_schema; drop database if exists db_character_length_gbk; NOTICE: database "db_character_length_gbk" does not exist, skipping create database db_character_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; diff --git a/contrib/dolphin/expected/builtin_funcs/conv.out b/contrib/dolphin/expected/builtin_funcs/conv.out index 50e1a6902..ee937d579 100644 --- a/contrib/dolphin/expected/builtin_funcs/conv.out +++ b/contrib/dolphin/expected/builtin_funcs/conv.out @@ -1,7 +1,5 @@ -drop database if exists db_conv; -NOTICE: database "db_conv" does not exist, skipping -create database db_conv dbcompatibility 'B'; -\c db_conv +create schema db_conv; +set current_schema to 'db_conv'; select conv('a',16,2); conv ------ @@ -504,5 +502,5 @@ select conv(-9544646155975628532428411,-10,-10); -9223372036854775808 (1 row) -\c postgres -drop database if exists db_conv; +drop schema db_conv cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/convert.out b/contrib/dolphin/expected/builtin_funcs/convert.out index 2354ea84a..29b4022f9 100644 --- a/contrib/dolphin/expected/builtin_funcs/convert.out +++ b/contrib/dolphin/expected/builtin_funcs/convert.out @@ -1,7 +1,5 @@ -drop database if exists db_convert; -NOTICE: database "db_convert" does not exist, skipping -create database db_convert dbcompatibility 'B'; -\c db_convert +create schema db_convert; +set current_schema to 'db_convert'; select convert(1 using 'utf8'); convert --------- @@ -78,5 +76,5 @@ select convert(1 using decimal(10,3)); 1.000 (1 row) -\c postgres -drop database if exists db_convert; +drop schema db_convert cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/crc32.out b/contrib/dolphin/expected/builtin_funcs/crc32.out index 
b44673211..471dc2f59 100644 --- a/contrib/dolphin/expected/builtin_funcs/crc32.out +++ b/contrib/dolphin/expected/builtin_funcs/crc32.out @@ -1,7 +1,5 @@ -drop database if exists db_crc32; -NOTICE: database "db_crc32" does not exist, skipping -create database db_crc32 dbcompatibility 'B'; -\c db_crc32 +create schema db_crc32; +set current_schema to 'db_crc32'; select crc32('abc'); crc32 ----------- @@ -26,5 +24,5 @@ select crc32(10),crc32(-3.1415926),crc32(1.339E5),crc32('ab57'),crc32('HAF47'); 2707236321 | 1632764266 | 2833135858 | 4076943245 | 4203314247 (1 row) -\c postgres -drop database if exists db_crc32; +drop schema db_crc32 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/db_b_format.out b/contrib/dolphin/expected/builtin_funcs/db_b_format.out index a47df3a34..e4268d76d 100644 --- a/contrib/dolphin/expected/builtin_funcs/db_b_format.out +++ b/contrib/dolphin/expected/builtin_funcs/db_b_format.out @@ -1,7 +1,5 @@ -drop database if exists db_db_b_format; -NOTICE: database "db_db_b_format" does not exist, skipping -create database db_db_b_format dbcompatibility 'B'; -\c db_db_b_format +create schema db_db_b_format; +set current_schema to 'db_db_b_format'; -- test for b_compatibility_mode = false select format(1234.456, 2); format @@ -567,8 +565,8 @@ select format('%2$s, %1$s', variadic array[1, 2]); 2, 1 (1 row) -\c postgres -drop database db_db_b_format; +drop schema db_db_b_format cascade; +reset current_schema; -- test for A compatibility to ensure the original functionality is good. 
create database db_db_b_format dbcompatibility 'A'; \c db_db_b_format diff --git a/contrib/dolphin/expected/builtin_funcs/db_b_hex.out b/contrib/dolphin/expected/builtin_funcs/db_b_hex.out index 86fb7dc39..0aa71e6af 100644 --- a/contrib/dolphin/expected/builtin_funcs/db_b_hex.out +++ b/contrib/dolphin/expected/builtin_funcs/db_b_hex.out @@ -1,7 +1,5 @@ -drop database if exists db_b_hex; -NOTICE: database "db_b_hex" does not exist, skipping -create database db_b_hex dbcompatibility 'B'; -\c db_b_hex +create schema db_b_hex; +set current_schema to 'db_b_hex'; select hex(int1(255)); ERROR: tinyint out of range CONTEXT: referenced column: to_number @@ -203,5 +201,6 @@ select hex(c1) from bytea_to_hex_test; deadbeef (1 row) -\c postgres -drop database if exists db_b_hex; +drop schema db_b_hex cascade; +NOTICE: drop cascades to table bytea_to_hex_test +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/db_b_if.out b/contrib/dolphin/expected/builtin_funcs/db_b_if.out index 94e00fdce..7178b2cfe 100644 --- a/contrib/dolphin/expected/builtin_funcs/db_b_if.out +++ b/contrib/dolphin/expected/builtin_funcs/db_b_if.out @@ -1,7 +1,5 @@ -drop database if exists db_b_if; -NOTICE: database "db_b_if" does not exist, skipping -create database db_b_if dbcompatibility 'B'; -\c db_b_if +create schema db_b_if; +set current_schema to 'db_b_if'; select if(TRUE, 1, 2); case ------ @@ -23,7 +21,7 @@ CONTEXT: referenced column: case -- '2022-01-30' is text, date '2022-01-30' is date CREATE VIEW test_view as select '2022-01-30' as text_type, date '2022-01-30' as date_type; \d+ test_view - View "public.test_view" + View "db_b_if.test_view" Column | Type | Modifiers | Storage | Description -----------+------+-----------+----------+------------- text_type | text | | extended | @@ -193,5 +191,6 @@ ERROR: CASE types boolean and numeric cannot be matched LINE 1: select if (true, 2.2::numeric(10, 2), true) as a, if (false,... 
^ CONTEXT: referenced column: a -\c postgres -drop database if exists db_b_if; +drop schema db_b_if cascade; +NOTICE: drop cascades to view test_view +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/elt.out b/contrib/dolphin/expected/builtin_funcs/elt.out index 77570ed73..fbb397c84 100644 --- a/contrib/dolphin/expected/builtin_funcs/elt.out +++ b/contrib/dolphin/expected/builtin_funcs/elt.out @@ -1,7 +1,5 @@ -drop database if exists db_elt; -NOTICE: database "db_elt" does not exist, skipping -create database db_elt dbcompatibility 'B'; -\c db_elt +create schema db_elt; +set current_schema to 'db_elt'; select elt(1,1); elt ----- @@ -110,5 +108,5 @@ select elt(1,'a',2); a (1 row) -\c postgres -drop database if exists db_elt; +drop schema db_elt cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/field.out b/contrib/dolphin/expected/builtin_funcs/field.out index a98cfe412..d35dbbf9b 100644 --- a/contrib/dolphin/expected/builtin_funcs/field.out +++ b/contrib/dolphin/expected/builtin_funcs/field.out @@ -1,7 +1,5 @@ -drop database if exists db_field; -NOTICE: database "db_field" does not exist, skipping -create database db_field dbcompatibility 'B'; -\c db_field +create schema db_field; +set current_schema to 'db_field'; select field(4,1,2,3,4); field ------- @@ -95,5 +93,5 @@ select field(' ','@',null,' ','',' '); 3 (1 row) -\c postgres -drop database if exists db_field; +drop schema db_field cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/find_in_set.out b/contrib/dolphin/expected/builtin_funcs/find_in_set.out index e0c760a5b..15c4f2674 100644 --- a/contrib/dolphin/expected/builtin_funcs/find_in_set.out +++ b/contrib/dolphin/expected/builtin_funcs/find_in_set.out @@ -1,7 +1,5 @@ -drop database if exists db_find_in_set; -NOTICE: database "db_find_in_set" does not exist, skipping -create database db_find_in_set dbcompatibility 'B'; -\c db_find_in_set +create schema 
db_find_in_set; +set current_schema to 'db_find_in_set'; select find_in_set(1,'a,1,c'); find_in_set ------------- @@ -92,5 +90,5 @@ select find_in_set(1.1,'a,1.2,c,qwee,1212,1.1'); 6 (1 row) -\c postgres -drop database if exists db_find_in_set; +drop schema db_find_in_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/make_set.out b/contrib/dolphin/expected/builtin_funcs/make_set.out index 02fa26bc7..ba4d2467d 100644 --- a/contrib/dolphin/expected/builtin_funcs/make_set.out +++ b/contrib/dolphin/expected/builtin_funcs/make_set.out @@ -1,7 +1,5 @@ -drop database if exists make_set; -NOTICE: database "make_set" does not exist, skipping -create database make_set dbcompatibility 'b'; -\c make_set +create schema make_set; +set current_schema to 'make_set'; set dolphin.sql_mode = ''; select make_set(3, 'a', 'b', 'c'); make_set @@ -141,5 +139,5 @@ select make_set(3,01/02/03, false, true, false); 0.166666666666667,0 (1 row) -\c postgres -drop database make_set +drop schema make_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/not_between.out b/contrib/dolphin/expected/builtin_funcs/not_between.out index ae5fc133b..2d75cdb46 100644 --- a/contrib/dolphin/expected/builtin_funcs/not_between.out +++ b/contrib/dolphin/expected/builtin_funcs/not_between.out @@ -1,7 +1,5 @@ -drop database if exists db_not_between; -NOTICE: database "db_not_between" does not exist, skipping -create database db_not_between dbcompatibility 'B'; -\c db_not_between +create schema db_not_between; +set current_schema to 'db_not_between'; select 2 not between 2 and 23; ?column? 
---------- @@ -176,5 +174,5 @@ select 1 not between '1测' and '1'; f (1 row) -\c postgres -drop database if exists db_not_between; +drop schema db_not_between cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/soundex.out b/contrib/dolphin/expected/builtin_funcs/soundex.out index 45d3583cd..2fba64913 100644 --- a/contrib/dolphin/expected/builtin_funcs/soundex.out +++ b/contrib/dolphin/expected/builtin_funcs/soundex.out @@ -1,7 +1,5 @@ -drop database if exists db_soundex; -NOTICE: database "db_soundex" does not exist, skipping -create database db_soundex dbcompatibility 'B'; -\c db_soundex +create schema db_soundex; +set current_schema to 'db_soundex'; select soundex('abc'); soundex --------- @@ -236,5 +234,5 @@ select soundex('测T测h测试o测masёння я такая шчасліваяhe 测3524 (1 row) -\c postgres -drop database if exists db_soundex; +drop schema db_soundex cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/builtin_funcs/space.out b/contrib/dolphin/expected/builtin_funcs/space.out index 27b5e46e2..0fd01b32c 100644 --- a/contrib/dolphin/expected/builtin_funcs/space.out +++ b/contrib/dolphin/expected/builtin_funcs/space.out @@ -1,7 +1,5 @@ -drop database if exists db_space; -NOTICE: database "db_space" does not exist, skipping -create database db_space dbcompatibility 'B'; -\c db_space +create schema db_space; +set current_schema to 'db_space'; select space('a'); space ------- @@ -68,5 +66,5 @@ select space(b'111'); (1 row) -\c postgres -drop database if exists db_space; +drop schema db_space cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/connection_id.out b/contrib/dolphin/expected/connection_id.out index b333ca23c..2ae1a10c3 100644 --- a/contrib/dolphin/expected/connection_id.out +++ b/contrib/dolphin/expected/connection_id.out @@ -1,7 +1,5 @@ -drop database if exists test_connection_id; -NOTICE: database "test_connection_id" does not exist, skipping -create database test_connection_id dbcompatibility 
'b'; -\c test_connection_id +create schema test_connection_id; +set current_schema to 'test_connection_id'; -- 测试返回连接的ID SELECT CONNECTION_ID(); connection_id @@ -9,5 +7,5 @@ SELECT CONNECTION_ID(); --? (1 row) -\c postgres -drop database test_connection_id; +drop schema test_connection_id cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/conv_cast_test.out b/contrib/dolphin/expected/conv_cast_test.out index 71c76f602..7d9d3615b 100644 --- a/contrib/dolphin/expected/conv_cast_test.out +++ b/contrib/dolphin/expected/conv_cast_test.out @@ -1,9 +1,5 @@ --- b compatibility case -drop database if exists conv_cast_test; -NOTICE: database "conv_cast_test" does not exist, skipping --- create database conv_cast_test dbcompatibility 'b'; -create database conv_cast_test with DBCOMPATIBILITY = 'B'; -\c conv_cast_test +create schema conv_cast_test; +set current_schema to 'conv_cast_test'; select conv(-211111111111111111111111111111111111111111111111111111111177777,10,8); conv @@ -404,5 +400,5 @@ select cast(b'11111111111111111111111111111111111111111111111111111111111111111' (1 row) -\c postgres -drop database conv_cast_test; +drop schema conv_cast_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/call_function.out b/contrib/dolphin/expected/create_function_test/call_function.out old mode 100755 new mode 100644 index fd471e32e..a879a18cf --- a/contrib/dolphin/expected/create_function_test/call_function.out +++ b/contrib/dolphin/expected/create_function_test/call_function.out @@ -1,7 +1,5 @@ -drop database if exists db_func_call1; -NOTICE: database "db_func_call1" does not exist, skipping -create database db_func_call1 dbcompatibility 'B'; -\c db_func_call1 +create schema db_func_call1; +set current_schema to 'db_func_call1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; call f1('a'); f1 @@ -98,5 +96,11 @@ call f_3(); A (1 row) -\c postgres -drop database if exists db_func_call1; 
+drop schema db_func_call1 cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table tb_object0015 +drop cascades to table tb_object0015_01 +drop cascades to function tri_1() +drop cascades to function f_2() +drop cascades to function f_3() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/commentsharp.out b/contrib/dolphin/expected/create_function_test/commentsharp.out index 65fac3f68..2a55f3628 100644 --- a/contrib/dolphin/expected/create_function_test/commentsharp.out +++ b/contrib/dolphin/expected/create_function_test/commentsharp.out @@ -1,7 +1,5 @@ -drop database if exists db_comment_sharp; -NOTICE: database "db_comment_sharp" does not exist, skipping -create database db_comment_sharp dbcompatibility 'B'; -\c db_comment_sharp +create schema db_comment_sharp; +set current_schema to 'db_comment_sharp'; create table t1(a int,b int); insert into t1 values(10,11); insert into t1 values(12,13); @@ -185,5 +183,14 @@ LINE 3: for each row# drop table t_test; drop table t_test2; drop table t_test3; -\c postgres -drop database if exists db_comment_sharp; +drop schema db_comment_sharp cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table t1 +drop cascades to table "t1#t2" +drop cascades to table t3 +drop cascades to table t4 +drop cascades to table test +drop cascades to function testfunc3() +drop cascades to function testfunc2() +drop cascades to function testfunc() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/deterministic.out b/contrib/dolphin/expected/create_function_test/deterministic.out old mode 100755 new mode 100644 index 3d4e27de7..f4b20d971 --- a/contrib/dolphin/expected/create_function_test/deterministic.out +++ b/contrib/dolphin/expected/create_function_test/deterministic.out @@ -1,7 +1,5 @@ -drop database if exists db_func_1; -NOTICE: database "db_func_1" does not exist, skipping -create database db_func_1 dbcompatibility 'B'; -\c 
db_func_1 +create schema db_func_1; +set current_schema to 'db_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int NOT DETERMINISTIC AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int DETERMINISTIC AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int DETERMINISTIC LANGUAGE SQL AS $$ select s; $$ ; @@ -30,5 +28,10 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_1; +drop schema db_func_1 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/language_sql.out b/contrib/dolphin/expected/create_function_test/language_sql.out old mode 100755 new mode 100644 index ad4503004..0b9d0dddf --- a/contrib/dolphin/expected/create_function_test/language_sql.out +++ b/contrib/dolphin/expected/create_function_test/language_sql.out @@ -1,7 +1,5 @@ -drop database if exists db_func_2; -NOTICE: database "db_func_2" does not exist, skipping -create database db_func_2 dbcompatibility 'B'; -\c db_func_2 +create schema db_func_2; +set current_schema to 'db_func_2'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int LANGUAGE SQL AS $$ select s; $$ ; @@ -164,5 +162,17 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_2; +drop schema db_func_2 cascade; +NOTICE: drop cascades to 11 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +drop cascades to table language_1093039 +drop cascades to function fun_1(integer,integer,character varying) +drop cascades to function fun_2(integer,integer,character varying) +drop cascades to function 
fun_3(integer,integer,character varying) +drop cascades to function pro_1(integer,integer,character varying) +drop cascades to function pro_2(integer,integer,character varying) +drop cascades to function pro_3(integer,integer,character varying) +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/m_type_create_proc.out b/contrib/dolphin/expected/create_function_test/m_type_create_proc.out index 52241daad..9392bdfff 100644 --- a/contrib/dolphin/expected/create_function_test/m_type_create_proc.out +++ b/contrib/dolphin/expected/create_function_test/m_type_create_proc.out @@ -1,7 +1,5 @@ -drop database if exists m_create_proc_type; -NOTICE: database "m_create_proc_type" does not exist, skipping -create database m_create_proc_type dbcompatibility 'B'; -\c m_create_proc_type +create schema m_create_proc_type; +set current_schema to 'm_create_proc_type'; --test create procedure delimiter // CREATE PROCEDURE p() @@ -347,5 +345,24 @@ call doempty(); (1 row) -\c postgres -drop database m_create_proc_type; +drop schema m_create_proc_type cascade; +NOTICE: drop cascades to 18 other objects +DETAIL: drop cascades to function p() +drop cascades to function proc1() +drop cascades to table item +drop cascades to function procedure_1(integer,integer) +drop cascades to function procedure_1_m(integer,integer) +drop cascades to table test +drop cascades to function testpro(integer) +drop cascades to function testpro_m(integer) +drop cascades to function testpro_m6(integer) +drop cascades to function testpro_m7(integer) +drop cascades to function procedure_1_m_o(integer,integer) +drop cascades to function procedure_2_m_o() +drop cascades to table test9 +drop cascades to function test_proc() +drop cascades to function doiterate(integer) +drop cascades to function dorepeat(integer) +drop cascades to function docase(integer) +drop cascades to function doempty() +reset current_schema; diff --git 
a/contrib/dolphin/expected/create_function_test/single_line_proc.out b/contrib/dolphin/expected/create_function_test/single_line_proc.out index e50e0baa7..8eeb1c6f1 100644 --- a/contrib/dolphin/expected/create_function_test/single_line_proc.out +++ b/contrib/dolphin/expected/create_function_test/single_line_proc.out @@ -1,7 +1,5 @@ -drop database if exists db_func_call_2; -NOTICE: database "db_func_call_2" does not exist, skipping -create database db_func_call_2 dbcompatibility 'B'; -\c db_func_call_2 +create schema db_func_call_2; +set current_schema to 'db_func_call_2'; create table t1 (a int); create table t2 (a int); insert into t1 values(1),(2),(3); @@ -231,5 +229,22 @@ LINE 2: select z from tz; QUERY: select z from tz; -\c regress -\connect: FATAL: database "regress" does not exist +drop schema db_func_call_2 cascade; +NOTICE: drop cascades to 16 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to function proc1() +drop cascades to function proc2() +drop cascades to function proc3() +drop cascades to function proc4() +drop cascades to function proc5(integer) +drop cascades to function proc6() +drop cascades to function proc7() +drop cascades to function proc8() +drop cascades to function proc9() +drop cascades to table pbu_trade_collect_mbr_m +drop cascades to function proc10() +drop cascades to table base_info_pbu_trade +drop cascades to table base_info_pbu_org +drop cascades to function proc11() +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/sql_options.out b/contrib/dolphin/expected/create_function_test/sql_options.out old mode 100755 new mode 100644 index f608d466c..ce45c4b6b --- a/contrib/dolphin/expected/create_function_test/sql_options.out +++ b/contrib/dolphin/expected/create_function_test/sql_options.out @@ -1,7 +1,5 @@ -drop database if exists db_func_3; -NOTICE: database "db_func_3" does not exist, skipping -create database db_func_3 dbcompatibility 'B'; -\c db_func_3 
+create schema db_func_3; +set current_schema to 'db_func_3'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int CONTAINS SQL DETERMINISTIC AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int CONTAINS SQL LANGUAGE SQL AS $$ select s; $$ ; @@ -94,5 +92,10 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_3; +drop schema db_func_3 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +reset current_schema; diff --git a/contrib/dolphin/expected/create_function_test/sql_security.out b/contrib/dolphin/expected/create_function_test/sql_security.out old mode 100755 new mode 100644 index 2d2facce4..4cc467715 --- a/contrib/dolphin/expected/create_function_test/sql_security.out +++ b/contrib/dolphin/expected/create_function_test/sql_security.out @@ -1,7 +1,5 @@ -drop database if exists db_func_4; -NOTICE: database "db_func_4" does not exist, skipping -create database db_func_4 dbcompatibility 'B'; -\c db_func_4 +create schema db_func_4; +set current_schema to 'db_func_4'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int SQL SECURITY DEFINER AS $$ select 1 $$ ; CREATE FUNCTION f2 (s int) RETURNS int NO SQL SQL SECURITY DEFINER AS $$ select s; $$ ; CREATE FUNCTION f3 (s int) RETURNS int SQL SECURITY INVOKER READS SQL DATA LANGUAGE SQL AS $$ select s; $$ ; @@ -30,5 +28,10 @@ call f4(4); 4 (1 row) -\c postgres -drop database if exists db_func_4; +drop schema db_func_4 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to function f1(character) +drop cascades to function f2(integer) +drop cascades to function f3(integer) +drop cascades to function f4(integer) +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions.out b/contrib/dolphin/expected/db_b_date_time_functions.out index 
a3b466162..015bf8178 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions.out +++ b/contrib/dolphin/expected/db_b_date_time_functions.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test1; +set current_schema to 'b_datetime_func_test1'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; -- test part-one function @@ -578,5 +575,5 @@ select * from test_datetime; (3 rows) drop table test_datetime; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test1 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions2.out b/contrib/dolphin/expected/db_b_date_time_functions2.out index 1d8aa3cef..e360638fd 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions2.out +++ b/contrib/dolphin/expected/db_b_date_time_functions2.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test2; +set current_schema to 'b_datetime_func_test2'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -1411,5 +1408,5 @@ select * from test order by funcname; (334 rows) drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions3.out b/contrib/dolphin/expected/db_b_date_time_functions3.out index ea87b8463..fbd34ef0d 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions3.out +++ b/contrib/dolphin/expected/db_b_date_time_functions3.out @@ -1,8 +1,5 @@ ----- 
b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test3; +set current_schema to 'b_datetime_func_test3'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -977,5 +974,5 @@ select * from test order by funcname; (309 rows) drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test3 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_date_time_functions4.out b/contrib/dolphin/expected/db_b_date_time_functions4.out index 9c0062545..77e106406 100644 --- a/contrib/dolphin/expected/db_b_date_time_functions4.out +++ b/contrib/dolphin/expected/db_b_date_time_functions4.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -NOTICE: database "b_datetime_func_test" does not exist, skipping -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test4; +set current_schema to 'b_datetime_func_test4'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -799,5 +796,5 @@ select * from test order by funcname; (269 rows) drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; +drop schema b_datetime_func_test4 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_new_gram_test.out b/contrib/dolphin/expected/db_b_new_gram_test.out index c7c401bc8..249400d29 100644 --- a/contrib/dolphin/expected/db_b_new_gram_test.out +++ b/contrib/dolphin/expected/db_b_new_gram_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_new_gram_test; -NOTICE: database "db_b_new_gram_test" does not exist, skipping -create database db_b_new_gram_test dbcompatibility 'B'; -\c 
db_b_new_gram_test +create schema db_b_new_gram_test; +set current_schema to 'db_b_new_gram_test'; -- CREATE TABLE engine test CREATE TABLE test_engine_1 (a int) engine = InnoDB; CREATE TABLE IF NOT EXISTS test_engine_2 (a int) engine = InnoDB; @@ -19,7 +17,7 @@ CREATE TABLE test_engine_as engine = InnoDB as select a from test_engine_1; -- CREATE TABLE COMPRESSION test CREATE TABLE test_compression_1_pglz (a int) COMPRESSION = pglz; \d+ test_compression_1_pglz - Table "public.test_compression_1_pglz" + Table "db_b_new_gram_test.test_compression_1_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -28,7 +26,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE IF NOT EXISTS test_compression_create_2_pglz (a int) COMPRESSION = pglz; \d+ test_compression_create_2_pglz - Table "public.test_compression_create_2_pglz" + Table "db_b_new_gram_test.test_compression_create_2_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -37,7 +35,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE test_compression_type_table_1_pglz OF test_engine_type1 COMPRESSION = pglz; \d+ test_compression_type_table_1_pglz - Table "public.test_compression_type_table_1_pglz" + Table "db_b_new_gram_test.test_compression_type_table_1_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -48,7 +46,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE IF NOT EXISTS test_compression_create_type_table_2_pglz OF test_engine_type2 COMPRESSION = pglz; \d+ test_compression_create_type_table_2_pglz - Table "public.test_compression_create_type_table_2_pglz" + Table "db_b_new_gram_test.test_compression_create_type_table_2_pglz" Column | Type | 
Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -59,7 +57,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE test_compression_as_pglz COMPRESSION = pglz as select a from test_compression_1_pglz; \d+ test_compression_as_pglz - Table "public.test_compression_as_pglz" + Table "db_b_new_gram_test.test_compression_as_pglz" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -68,7 +66,7 @@ Options: orientation=row, compresstype=1 CREATE TABLE test_compression_1_zstd (a int) COMPRESSION = zstd; \d+ test_compression_1_zstd - Table "public.test_compression_1_zstd" + Table "db_b_new_gram_test.test_compression_1_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -77,7 +75,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE IF NOT EXISTS test_compression_create_2_zstd (a int) COMPRESSION = zstd; \d+ test_compression_create_2_zstd - Table "public.test_compression_create_2_zstd" + Table "db_b_new_gram_test.test_compression_create_2_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -86,7 +84,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE test_compression_type_table_1_zstd OF test_engine_type1 COMPRESSION = zstd; \d+ test_compression_type_table_1_zstd - Table "public.test_compression_type_table_1_zstd" + Table "db_b_new_gram_test.test_compression_type_table_1_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -97,7 +95,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE IF NOT EXISTS 
test_compression_create_type_table_2_zstd OF test_engine_type2 COMPRESSION = zstd; \d+ test_compression_create_type_table_2_zstd - Table "public.test_compression_create_type_table_2_zstd" + Table "db_b_new_gram_test.test_compression_create_type_table_2_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -108,7 +106,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE test_compression_as_zstd COMPRESSION = zstd as select a from test_compression_1_zstd; \d+ test_compression_as_zstd - Table "public.test_compression_as_zstd" + Table "db_b_new_gram_test.test_compression_as_zstd" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -117,7 +115,7 @@ Options: orientation=row, compresstype=2 CREATE TABLE test_compression_1_none (a int) COMPRESSION = 'none'; \d+ test_compression_1_none - Table "public.test_compression_1_none" + Table "db_b_new_gram_test.test_compression_1_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -126,7 +124,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE IF NOT EXISTS test_compression_create_2_none (a int) COMPRESSION = 'none'; \d+ test_compression_create_2_none - Table "public.test_compression_create_2_none" + Table "db_b_new_gram_test.test_compression_create_2_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -135,7 +133,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE test_compression_type_table_1_none OF test_engine_type1 COMPRESSION = 'none'; \d+ test_compression_type_table_1_none - Table 
"public.test_compression_type_table_1_none" + Table "db_b_new_gram_test.test_compression_type_table_1_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -146,7 +144,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE IF NOT EXISTS test_compression_create_type_table_2_none OF test_engine_type2 COMPRESSION = 'none'; \d+ test_compression_create_type_table_2_none - Table "public.test_compression_create_type_table_2_none" + Table "db_b_new_gram_test.test_compression_create_type_table_2_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+----------+--------------+------------- a | integer | | plain | | @@ -157,7 +155,7 @@ Options: orientation=row, compresstype=0, compression=no CREATE TABLE test_compression_as_none COMPRESSION = none as select a from test_compression_1_none; \d+ test_compression_as_none - Table "public.test_compression_as_none" + Table "db_b_new_gram_test.test_compression_as_none" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -196,7 +194,7 @@ PARTITION p3 VALUES (4000), PARTITION p4 VALUES (5000) ); \d+ test_list1 - Table "public.test_list1" + Table "db_b_new_gram_test.test_list1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- col1 | integer | | plain | | @@ -215,7 +213,7 @@ PARTITION p3 VALUES IN (4000), PARTITION p4 VALUES IN (5000) ); \d+ test_list2 - Table "public.test_list2" + Table "db_b_new_gram_test.test_list2" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- col1 | integer | | plain | | @@ -294,29 +292,29 @@ DROP TEMPORARY TABLE test_engine_1 CASCADE; -- new grammar 
test for analyze table CREATE TABLE t_new_analyze(c1 int, c2 text); ANALYZE NO_WRITE_TO_BINLOG TABLE t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------- - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------- + db_b_new_gram_test.t_new_analyze | analyze | status | OK (1 row) ANALYZE LOCAL TABLE t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------- - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------- + db_b_new_gram_test.t_new_analyze | analyze | status | OK (1 row) ANALYZE TABLE t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------- - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------- + db_b_new_gram_test.t_new_analyze | analyze | status | OK (1 row) ANALYZE TABLE t_not_exist, t_new_analyze; - Table | Op | Msg_type | Msg_text -----------------------+---------+----------+---------------------------------------------- - public.t_not_exist | analyze | Error | relation "public.t_not_exist" does not exist - public.t_not_exist | analyze | status | Operation failed - public.t_new_analyze | analyze | status | OK + Table | Op | Msg_type | Msg_text +----------------------------------+---------+----------+---------------------------------------------------------- + db_b_new_gram_test.t_not_exist | analyze | Error | relation "db_b_new_gram_test.t_not_exist" does not exist + db_b_new_gram_test.t_not_exist | analyze | status | Operation failed + db_b_new_gram_test.t_new_analyze | analyze | status | OK (3 rows) -- new grammar test for CREATE TABLESPACE @@ -535,7 +533,7 @@ drop user u_test_tbspace2; CREATE TABLESPACE test_tbspace ADD DATAFILE 
'test_tbspace1'; CREATE TABLE t_tbspace(num int) TABLESPACE test_tbspace; \d t_tbspace - Table "public.t_tbspace" +Table "db_b_new_gram_test.t_tbspace" Column | Type | Modifiers --------+---------+----------- num | integer | @@ -547,7 +545,7 @@ CREATE TABLESPACE test_tbspace_ibd ADD DATAFILE 'test_tbspace1.ibd'; WARNING: Suffix ".ibd" of datafile path detected. The actual path will be renamed as "test_tbspace1_ibd" CREATE TABLE t_tbspace(num int) TABLESPACE test_tbspace_ibd; \d t_tbspace - Table "public.t_tbspace" +Table "db_b_new_gram_test.t_tbspace" Column | Type | Modifiers --------+---------+----------- num | integer | @@ -564,21 +562,21 @@ ERROR: Tablespace "test_tbspace_ibd_2" does not exist. -- new grammar test for RENAME [TO | AS] CREATE TABLE t_rename(c int); \d t_rename - Table "public.t_rename" +Table "db_b_new_gram_test.t_rename" Column | Type | Modifiers --------+---------+----------- c | integer | ALTER TABLE t_rename RENAME TO t_rename_to; \d t_rename_to - Table "public.t_rename_to" +Table "db_b_new_gram_test.t_rename_to" Column | Type | Modifiers --------+---------+----------- c | integer | ALTER TABLE t_rename_to RENAME AS t_rename_as; \d t_rename_as - Table "public.t_rename_as" +Table "db_b_new_gram_test.t_rename_as" Column | Type | Modifiers --------+---------+----------- c | integer | @@ -591,7 +589,7 @@ CREATE TABLE t_index_new_grammar(c1 int, c2 int); CREATE INDEX test_index_btree_1 ON t_index_new_grammar USING btree(c1); CREATE INDEX test_index_btree_2 USING btree ON t_index_new_grammar(c2); \d t_index_new_grammar -Table "public.t_index_new_grammar" +Table "db_b_new_gram_test.t_index_new_grammar" Column | Type | Modifiers --------+---------+----------- c1 | integer | @@ -935,7 +933,7 @@ SELECT COUNT(*) FROM t_ctas_new; (1 row) \d t_ctas_new - Table "public.t_ctas_new" +Table "db_b_new_gram_test.t_ctas_new" Column | Type | Modifiers ---------+---------+----------- new_c_a | integer | @@ -950,7 +948,7 @@ SELECT COUNT(*) FROM t_ctas_new; (1 row) 
\d t_ctas_new - Table "public.t_ctas_new" +Table "db_b_new_gram_test.t_ctas_new" Column | Type | Modifiers ---------+---------+----------- new_c_a | integer | @@ -966,7 +964,7 @@ SELECT COUNT(*) FROM t_like; (1 row) \d t_like - Table "public.t_like" +Table "db_b_new_gram_test.t_like" Column | Type | Modifiers --------+---------+----------- a | integer | @@ -999,7 +997,7 @@ SELECT COUNT(*) FROM t_like2; (1 row) \d t_like2 - Table "public.t_like2" +Table "db_b_new_gram_test.t_like2" Column | Type | Modifiers --------+---------+----------- a | integer | @@ -1029,10 +1027,8 @@ SELECT COUNT(*) FROM t_ctas_new; DROP TABLE t_ctas_new; DROP TABLE t_ctas; -drop database if exists test_m; -NOTICE: database "test_m" does not exist, skipping -create database test_m dbcompatibility 'b'; -\c test_m +create schema test_m; +set current_schema to 'test_m'; create table test_unique( f1 int, f2 int, @@ -1044,7 +1040,7 @@ create table test_unique( NOTICE: CREATE TABLE / UNIQUE will create implicit index "u_idx_name" for table "test_unique" NOTICE: CREATE TABLE / UNIQUE will create implicit index "u_key_name" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "test_m.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -1207,6 +1203,8 @@ select * from ignore_range_range partition (p_201901, p_201905_b); (2 rows) drop table ignore_range_range; -\c postgres -drop database if exists test_m; -drop database db_b_new_gram_test; +drop schema test_m cascade; +NOTICE: drop cascades to table test_unique +drop schema db_b_new_gram_test cascade; +--?.* +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser1.out b/contrib/dolphin/expected/db_b_parser1.out index d657801f7..ba66c9033 100644 --- a/contrib/dolphin/expected/db_b_parser1.out +++ b/contrib/dolphin/expected/db_b_parser1.out @@ -1,7 +1,5 @@ -drop database if exists 
db_b_parser1; -NOTICE: database "db_b_parser1" does not exist, skipping -create database db_b_parser1 dbcompatibility 'b'; -\c db_b_parser1 +create schema db_b_parser1; +set current_schema to 'db_b_parser1'; select 'bbbbb' regexp '^([bc])\1*$' as t, 'bbbbb' not regexp '^([bc])\1*$' as t2, 'bbbbb' rlike '^([bc])\1*$' as t; t | t2 | t ---+----+--- @@ -356,5 +354,5 @@ select '-12.3abc' rlike 'null'; 0 (1 row) -\c postgres -drop database if exists db_b_parser1; +drop schema db_b_parser1 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser2.out b/contrib/dolphin/expected/db_b_parser2.out index a04872cca..fbb001fb0 100644 --- a/contrib/dolphin/expected/db_b_parser2.out +++ b/contrib/dolphin/expected/db_b_parser2.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser2; -NOTICE: database "db_b_parser2" does not exist, skipping -create database db_b_parser2 dbcompatibility 'b'; -\c db_b_parser2 +create schema db_b_parser2; +set current_schema to 'db_b_parser2'; --验证DAYOFMONTH() DAYOFWEEK() DAYOFYEAR() HOUR() MICROSECOND() MINUTE() QUARTER() SECOND() WEEKDAY() WEEKOFYEAR() YEAR() select DAYOFMONTH(datetime '2021-11-4 16:30:44.341191'); dayofmonth @@ -648,5 +646,5 @@ select fchar,length(fchar) from fchar_test order by 1,2; (1 row) drop table fchar_test; -\c postgres -drop database if exists db_b_parser2; +drop schema db_b_parser2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser3.out b/contrib/dolphin/expected/db_b_parser3.out index ba7bd8603..b6d3cc585 100644 --- a/contrib/dolphin/expected/db_b_parser3.out +++ b/contrib/dolphin/expected/db_b_parser3.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser3; -NOTICE: database "db_b_parser3" does not exist, skipping -create database db_b_parser3 dbcompatibility 'b'; -\c db_b_parser3 +create schema db_b_parser3; +set current_schema to 'db_b_parser3'; --测试点一:验证lcase函数 select lcase('ABc'), lcase('哈哈'), lcase('123456'),lcase('哈市&%%¥#'),lcase(null); lcase | lcase | 
lcase | lcase | lcase @@ -416,5 +414,8 @@ select acos(-1.000001); (1 row) -\c postgres -drop database if exists db_b_parser3; +drop schema db_b_parser3 cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table tb_db_b_parser0003 +drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_parser4.out b/contrib/dolphin/expected/db_b_parser4.out index f4942158c..af8153daa 100644 --- a/contrib/dolphin/expected/db_b_parser4.out +++ b/contrib/dolphin/expected/db_b_parser4.out @@ -1,7 +1,5 @@ -drop database if exists db_b_parser4; -NOTICE: database "db_b_parser4" does not exist, skipping -create database db_b_parser4 dbcompatibility 'b'; -\c db_b_parser4 +create schema db_b_parser4; +set current_schema to 'db_b_parser4'; --验证text类型 drop table if exists tb_db_b_parser_0001; NOTICE: table "tb_db_b_parser_0001" does not exist, skipping @@ -11,7 +9,7 @@ drop table if exists tb_db_b_parser_0002; NOTICE: table "tb_db_b_parser_0002" does not exist, skipping create table tb_db_b_parser_0002(a decimal, b number, c dec, d numeric, e fixed); \d tb_db_b_parser_0002 - Table "public.tb_db_b_parser_0002" +Table "db_b_parser4.tb_db_b_parser_0002" Column | Type | Modifiers --------+---------------+----------- a | numeric(10,0) | @@ -25,7 +23,7 @@ drop table if exists tb_default_float; NOTICE: table "tb_default_float" does not exist, skipping create table tb_default_float(a float4(10)); \d tb_default_float -Table "public.tb_default_float" +Table "db_b_parser4.tb_default_float" Column | Type | Modifiers --------+------+----------- a | real | @@ -34,7 +32,7 @@ drop table if exists tb_default_double; NOTICE: table "tb_default_double" does not exist, skipping create table tb_default_double(a double); \d tb_default_double - Table "public.tb_default_double" +Table "db_b_parser4.tb_default_double" Column | Type | Modifiers --------+------------------+----------- a | double precision | @@ -44,7 +42,7 @@ drop table if exists tb_real_float; 
NOTICE: table "tb_real_float" does not exist, skipping create table tb_real_float(a real, b float); \d tb_real_float -Table "public.tb_real_float" +Table "db_b_parser4.tb_real_float" Column | Type | Modifiers --------+------+----------- a | real | @@ -54,5 +52,6 @@ drop table if exists tb_db_b_parser_0002; drop table if exists tb_default_float; drop table if exists tb_default_double; drop table if exists tb_real_float; -\c postgres -drop database if exists db_b_parser4; +drop schema db_b_parser4 cascade; +NOTICE: drop cascades to table tb_db_b_parser_0001 +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_plpgsql_test.out b/contrib/dolphin/expected/db_b_plpgsql_test.out index a8b28cfa0..b8b4603a2 100644 --- a/contrib/dolphin/expected/db_b_plpgsql_test.out +++ b/contrib/dolphin/expected/db_b_plpgsql_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_plpgsql_test; -NOTICE: database "db_b_plpgsql_test" does not exist, skipping -create database db_b_plpgsql_test dbcompatibility 'b'; -\c db_b_plpgsql_test +create schema db_b_plpgsql_test; +set current_schema to 'db_b_plpgsql_test'; create table tb_b_grammar_0038(a text(10)) engine = 表1; create or replace procedure proc_01() as @@ -47,5 +45,11 @@ SELECT * from tb_b_grammar_0038; tom (1 row) -\c postgres -drop database if exists db_b_plpgsql_test; +drop schema db_b_plpgsql_test cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table tb_b_grammar_0038 +drop cascades to function proc_01() +drop cascades to table j1_tbl +drop cascades to table j2_tbl +drop cascades to function peoc_165() +reset current_schema; diff --git a/contrib/dolphin/expected/db_b_rename_user_test.out b/contrib/dolphin/expected/db_b_rename_user_test.out index 35d8b9fc7..d627e6b1b 100644 --- a/contrib/dolphin/expected/db_b_rename_user_test.out +++ b/contrib/dolphin/expected/db_b_rename_user_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_rename_user_test; -NOTICE: database "db_b_rename_user_test" does 
not exist, skipping -create database db_b_rename_user_test dbcompatibility 'b'; -\c db_b_rename_user_test +create schema db_b_rename_user_test; +set current_schema to 'db_b_rename_user_test'; CREATE USER user1 WITH ENCRYPTED PASSWORD 'user1@1234'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. CREATE USER user2 WITH ENCRYPTED PASSWORD 'user2@1234'; @@ -55,5 +53,5 @@ select usename from pg_catalog.pg_user drop user user4; drop user user5; drop user user6; -\c postgres -drop database if exists db_b_rename_user_test; +drop schema db_b_rename_user_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/default_guc.out b/contrib/dolphin/expected/default_guc.out index b178a6eb3..54b393425 100644 --- a/contrib/dolphin/expected/default_guc.out +++ b/contrib/dolphin/expected/default_guc.out @@ -1,7 +1,5 @@ -drop database if exists default_guc; -NOTICE: database "default_guc" does not exist, skipping -create database default_guc dbcompatibility 'b'; -\c default_guc +create schema default_guc; +set current_schema to 'default_guc'; show behavior_compat_options; behavior_compat_options ------------------------- @@ -51,5 +49,5 @@ select md5('0.123'); 677738b969d6037efce2c328c6814580 (1 row) -\c postgres -drop database if exists default_guc; +drop schema default_guc cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/describe.out b/contrib/dolphin/expected/describe.out index 1b57dec16..d9ec2283f 100644 --- a/contrib/dolphin/expected/describe.out +++ b/contrib/dolphin/expected/describe.out @@ -1,7 +1,5 @@ -drop database if exists db_describe; -NOTICE: database "db_describe" does not exist, skipping -create database db_describe dbcompatibility 'b'; -\c db_describe +create schema db_describe; +set current_schema to 'db_describe'; CREATE TABLE test2 ( id int PRIMARY KEY @@ -141,7 +139,7 @@ describe 
test; c | character varying(10) | NO | | NULL | (3 rows) -desc public.test; +desc db_describe.test; Field | Type | Null | Key | Default | Extra -------+-----------------------+------+-----+---------------------------------+------- a | integer | NO | PRI | nextval('test_a_seq'::regclass) | @@ -177,5 +175,9 @@ desc sc.test4; -------+------+------+-----+---------+------- (0 rows) -\c postgres -drop database if exists db_describe; +drop schema db_describe cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table db_describe.test2 +drop cascades to table db_describe.test3 +drop cascades to table db_describe.test +reset current_schema; diff --git a/contrib/dolphin/expected/empty_value_lists.out b/contrib/dolphin/expected/empty_value_lists.out index 282b0639d..c0affa4a8 100644 --- a/contrib/dolphin/expected/empty_value_lists.out +++ b/contrib/dolphin/expected/empty_value_lists.out @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists empty_value_lists; -NOTICE: database "empty_value_lists" does not exist, skipping -create database empty_value_lists dbcompatibility 'b'; -\c empty_value_lists +create schema empty_value_lists; +set current_schema to 'empty_value_lists'; create table test1(num int); create table test2(num int default 3); create table test3(num int not null); @@ -551,5 +548,51 @@ select * from m4; | | abc | bcd (2 rows) -\c postgres -drop database if exists empty_value_lists; +drop schema empty_value_lists cascade; +NOTICE: drop cascades to 45 other objects +DETAIL: drop cascades to table test1 +drop cascades to table test2 +drop cascades to table test3 +drop cascades to table test4 +drop cascades to table test5 +drop cascades to table test6 +drop cascades to table test7 +drop cascades to table test8 +drop cascades to table test9 +drop cascades to table test10 +drop cascades to table test11 +drop cascades to table test12 +drop cascades to table test13 +drop cascades to table test14 +drop cascades to table test15 +drop 
cascades to table test16 +drop cascades to table test17 +drop cascades to table test18 +drop cascades to table test19 +drop cascades to table test20 +drop cascades to table test21 +drop cascades to table test22 +drop cascades to table test23 +drop cascades to table test24 +drop cascades to table test25 +drop cascades to table test26 +drop cascades to table test27 +drop cascades to table test28 +drop cascades to table test29 +drop cascades to table test30 +drop cascades to table test31 +drop cascades to table test32 +drop cascades to table test33 +drop cascades to table test34 +drop cascades to table test35 +drop cascades to table test36 +drop cascades to table test37 +drop cascades to table test38 +drop cascades to table test39 +drop cascades to table test40 +drop cascades to table test41 +drop cascades to table m1 +drop cascades to table m2 +drop cascades to table m3 +drop cascades to table m4 +reset current_schema; diff --git a/contrib/dolphin/expected/empty_value_support_value.out b/contrib/dolphin/expected/empty_value_support_value.out index ff1eb4700..1c92f4b04 100644 --- a/contrib/dolphin/expected/empty_value_support_value.out +++ b/contrib/dolphin/expected/empty_value_support_value.out @@ -1,7 +1,5 @@ -drop database if exists empty_value_support_value; -NOTICE: database "empty_value_support_value" does not exist, skipping -create database empty_value_support_value dbcompatibility 'b'; -\c empty_value_support_value +create schema empty_value_support_value; +set current_schema to 'empty_value_support_value'; create table test1(num int not null); insert into test1 value(); ERROR: null value in column "num" violates not-null constraint @@ -26,5 +24,6 @@ select * from test1; 0 (3 rows) -\c postgres -drop database if exists empty_value_support_value; +drop schema empty_value_support_value cascade; +NOTICE: drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/explain_desc.out b/contrib/dolphin/expected/explain_desc.out index 
79edf4da9..96e980e5b 100644 --- a/contrib/dolphin/expected/explain_desc.out +++ b/contrib/dolphin/expected/explain_desc.out @@ -1,5 +1,5 @@ -create database db_explain_desc with dbcompatibility 'B'; -\c db_explain_desc +create schema db_explain_desc; +set current_schema to 'db_explain_desc'; create table ed_t(c1 int, c2 varchar(100), c3 int default 10); insert into ed_t values(generate_series(1, 10), 'hello', 100); -- 1.use explain to query table's info @@ -11,7 +11,7 @@ explain ed_t; c3 | integer | YES | | 10 | (3 rows) -explain public.ed_t; +explain db_explain_desc.ed_t; Field | Type | Null | Key | Default | Extra -------+------------------------+------+-----+---------+------- c1 | integer | YES | | NULL | @@ -424,5 +424,5 @@ explain format='TraDitional' delete from ed_t where c1 < 5; (3 rows) drop table ed_t; -\c postgres -drop database db_explain_desc; +drop schema db_explain_desc cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/export_set.out b/contrib/dolphin/expected/export_set.out index 45374c2a5..4b27aa843 100644 --- a/contrib/dolphin/expected/export_set.out +++ b/contrib/dolphin/expected/export_set.out @@ -1,7 +1,5 @@ -drop database if exists export_set; -NOTICE: database "export_set" does not exist, skipping -create database export_set dbcompatibility 'b'; -\c export_set +create schema export_set; +set current_schema to 'export_set'; -- 测试缺省值 SELECT EXPORT_SET(5,'Y','N',',',5); export_set @@ -122,5 +120,5 @@ SELECT EXPORT_SET(5,'Y','N',',,,,,,,,,,,,',5); Y,,,,,,,,,,,,N,,,,,,,,,,,,Y,,,,,,,,,,,,N,,,,,,,,,,,,N (1 row) -\c postgres -drop database if exists export_set; +drop schema export_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out b/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out index 9565c3ad2..2225b51c5 100644 --- a/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out +++ b/contrib/dolphin/expected/float_numeric_test/db_b_log_test.out @@ -1,13 
+1,17 @@ +drop database if exists db_b_log_test; +NOTICE: database "db_b_log_test" does not exist, skipping +create database db_b_log_test dbcompatibility 'A'; +\c db_b_log_test SELECT LOG(10); log ----- 1 (1 row) -drop database if exists db_b_log_test; -NOTICE: database "db_b_log_test" does not exist, skipping -create database db_b_log_test dbcompatibility 'B'; -\c db_b_log_test +\c contrib_regression +drop database db_b_log_test; +create schema db_b_log_test; +set current_schema to 'db_b_log_test'; SELECT LOG(10); log ------------------ @@ -218,5 +222,5 @@ select log10(b'111'::int); 0.845098040014257 (1 row) -\c postgres -drop database db_b_log_test; +drop schema db_b_log_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out b/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out index d2f59e5e0..4278a612c 100644 --- a/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out +++ b/contrib/dolphin/expected/float_numeric_test/db_b_sqrt_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_sqrt_test; +NOTICE: database "db_b_sqrt_test" does not exist, skipping +create database db_b_sqrt_test dbcompatibility 'A'; +\c db_b_sqrt_test SELECT SQRT(64); sqrt ------ @@ -7,10 +11,10 @@ SELECT SQRT(64); SELECT SQRT(-64); ERROR: cannot take square root of a negative number CONTEXT: referenced column: sqrt -drop database if exists db_b_sqrt_test; -NOTICE: database "db_b_sqrt_test" does not exist, skipping -create database db_b_sqrt_test dbcompatibility 'B'; -\c db_b_sqrt_test +\c contrib_regression +drop database db_b_sqrt_test; +create schema db_b_sqrt_test; +set current_schema to 'db_b_sqrt_test'; SELECT SQRT(64); sqrt ------ @@ -59,5 +63,5 @@ select sqrt(b'111'::int); 2.64575131106459 (1 row) -\c postgres -drop database db_b_sqrt_test; +drop schema db_b_sqrt_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/flush.out b/contrib/dolphin/expected/flush.out index 
949b641cd..1903c3f75 100644 --- a/contrib/dolphin/expected/flush.out +++ b/contrib/dolphin/expected/flush.out @@ -1,12 +1,10 @@ -drop database if exists db_flush; -NOTICE: database "db_flush" does not exist, skipping -create database db_flush dbcompatibility 'b'; -\c db_flush +create schema db_flush; +set current_schema to 'db_flush'; FLUSH BINARY LOGS; --? pg_switch_xlog --?.* --?.* (1 row) -\c postgres -drop database if exists db_flush; +drop schema db_flush cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/get_b_database.out b/contrib/dolphin/expected/get_b_database.out index 3d08b6036..ce4a1caae 100644 --- a/contrib/dolphin/expected/get_b_database.out +++ b/contrib/dolphin/expected/get_b_database.out @@ -1,11 +1,9 @@ -drop database if exists get_db; -NOTICE: database "get_db" does not exist, skipping -create database get_db dbcompatibility 'b'; -\c get_db +create schema get_db; +set current_schema to 'get_db'; select database(); database ---------- - public + get_db (1 row) create schema testdb; @@ -30,5 +28,5 @@ select database(); testdb1 (1 row) -\c postgres -drop database if exists get_db; +drop schema get_db cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/greatest_least.out b/contrib/dolphin/expected/greatest_least.out index 799466e15..9ea11b371 100644 --- a/contrib/dolphin/expected/greatest_least.out +++ b/contrib/dolphin/expected/greatest_least.out @@ -1,7 +1,5 @@ -drop database if exists greatest_least; -NOTICE: database "greatest_least" does not exist, skipping -create database greatest_least dbcompatibility 'b'; -\c greatest_least +create schema greatest_least; +set current_schema to 'greatest_least'; --return null if input include null select GREATEST(null,1,2), GREATEST(null,1,2) is null; greatest | ?column? 
@@ -28,5 +26,5 @@ select LEAST(1,2); 1 (1 row) -\c postgres -drop database if exists greatest_least; +drop schema greatest_least cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/group_concat_test.out b/contrib/dolphin/expected/group_concat_test.out index 7387479cb..0c84da59a 100644 --- a/contrib/dolphin/expected/group_concat_test.out +++ b/contrib/dolphin/expected/group_concat_test.out @@ -1,5 +1,5 @@ -create database t dbcompatibility 'B'; -\c t; +create schema t; +set current_schema to 't'; create table t(id text, v text); insert into t(id, v) values('1','a'),('2','b'),('1','c'),('2','d'); select group_concat(id,v separator ';') into tmp_table from t; @@ -19,9 +19,12 @@ explain verbose select id, group_concat(VARIADIC ARRAY[id,':',v] order by id) as -> Sort (cost=61.11..63.28 rows=869 width=64) Output: id, v Sort Key: t.id - -> Seq Scan on public.t (cost=0.00..18.69 rows=869 width=64) + -> Seq Scan on t.t (cost=0.00..18.69 rows=869 width=64) Output: id, v (8 rows) -\c postgres -drop database t; +drop schema t cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t +drop cascades to table tmp_table +reset current_schema; diff --git a/contrib/dolphin/expected/if_not_exists_test.out b/contrib/dolphin/expected/if_not_exists_test.out index f31a01c07..5a527022b 100644 --- a/contrib/dolphin/expected/if_not_exists_test.out +++ b/contrib/dolphin/expected/if_not_exists_test.out @@ -1,7 +1,5 @@ -drop database if exists test_if_not_exists; -NOTICE: database "test_if_not_exists" does not exist, skipping -create database test_if_not_exists dbcompatibility 'B'; -\c test_if_not_exists +create schema test_if_not_exists; +set current_schema to 'test_if_not_exists'; CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. 
CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; @@ -11,5 +9,5 @@ NOTICE: role "zzz" already exists DROP USER ZZZ; CREATE USER IF NOT EXISTS ZZZ WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. -\c postgres -drop database test_if_not_exists; +drop schema test_if_not_exists cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/implicit_cast.out b/contrib/dolphin/expected/implicit_cast.out index 40034d726..f6a1cfad6 100644 --- a/contrib/dolphin/expected/implicit_cast.out +++ b/contrib/dolphin/expected/implicit_cast.out @@ -1,7 +1,5 @@ -drop database if exists implicit_cast; -NOTICE: database "implicit_cast" does not exist, skipping -create database implicit_cast dbcompatibility 'b'; -\c implicit_cast +create schema implicit_cast; +set current_schema to 'implicit_cast'; select 1::int1 % 1::float4; ?column? 
---------- @@ -290,5 +288,5 @@ select 1::int8 | 1::text; 1 (1 row) -\c postgres -drop database if exists implicit_cast; +drop schema implicit_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/insert_set.out b/contrib/dolphin/expected/insert_set.out index 737f33b81..e391cab24 100644 --- a/contrib/dolphin/expected/insert_set.out +++ b/contrib/dolphin/expected/insert_set.out @@ -1,7 +1,5 @@ -drop database if exists insert_set; -NOTICE: database "insert_set" does not exist, skipping -create database insert_set dbcompatibility 'B'; -\c insert_set +create schema insert_set; +set current_schema to 'insert_set'; create table test_figure(tinyint tinyint, smallint smallint, integer integer, binary_integer binary_integer, bigint bigint); insert into test_figure set bigint = 7234134, binary_integer = 1011101, integer = 10000, smallint = 1, tinyint = 3; select * from test_figure; @@ -95,5 +93,14 @@ select * from test_error; | 23 (1 row) -\c postgres -drop database insert_set; +drop schema insert_set cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table test_figure +drop cascades to table test_money +drop cascades to table test_boolean +drop cascades to table test_char +drop cascades to table test_binary +drop cascades to table test_time +drop cascades to table test_netid +drop cascades to table test_error +reset current_schema; diff --git a/contrib/dolphin/expected/join_without_on.out b/contrib/dolphin/expected/join_without_on.out index 0e69cd9c6..5be7016d9 100644 --- a/contrib/dolphin/expected/join_without_on.out +++ b/contrib/dolphin/expected/join_without_on.out @@ -1,7 +1,5 @@ -drop database if exists join_without_on; -NOTICE: database "join_without_on" does not exist, skipping -create database join_without_on dbcompatibility 'b'; -\c join_without_on +create schema join_without_on; +set current_schema to 'join_without_on'; CREATE TABLE J1_TBL ( i integer, j integer, @@ -136,5 +134,10 @@ SELECT * FROM J1_TBL JOIN J2_TBL 
INNER JOIN J3_TBL INNER JOIN J4_TBL ON J1_TBL 1 | 4 | one | 1 | -1 | 2 | 2 | 1 | -1 (4 rows) -\c postgres -drop database if exists join_without_on; +drop schema join_without_on cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table j1_tbl +drop cascades to table j2_tbl +drop cascades to table j3_tbl +drop cascades to table j4_tbl +reset current_schema; diff --git a/contrib/dolphin/expected/json_array.out b/contrib/dolphin/expected/json_array.out index 21ce52181..4be0dd043 100644 --- a/contrib/dolphin/expected/json_array.out +++ b/contrib/dolphin/expected/json_array.out @@ -1,7 +1,5 @@ -drop database if exists test_json_array; -NOTICE: database "test_json_array" does not exist, skipping -create database test_json_array dbcompatibility 'B'; -\c test_json_array +create schema test_json_array; +set current_schema to 'test_json_array'; select json_array(1,2,3,4); json_array -------------- @@ -77,5 +75,5 @@ select name from dataa; (1 row) drop table dataa; -\c postgres -drop database if exists test_json_array; +drop schema test_json_array cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_array_append.out b/contrib/dolphin/expected/json_array_append.out index 840ff387d..05f21f96d 100644 --- a/contrib/dolphin/expected/json_array_append.out +++ b/contrib/dolphin/expected/json_array_append.out @@ -1,7 +1,5 @@ -drop database if exists test_json_array_append; -NOTICE: database "test_json_array_append" does not exist, skipping -create database test_json_array_append dbcompatibility'B'; -\c test_json_array_append +create schema test_json_array_append; +set current_schema to 'test_json_array_append'; select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[1]', 4); json_array_append ------------------- @@ -184,5 +182,5 @@ CONTEXT: referenced column: json_array_append select JSON_ARRAY_APPEND('[1, [2, 3]]', ' ', 4); ERROR: Invalid JSON path expression. The error is around argument 1. 
CONTEXT: referenced column: json_array_append -\c postgres -drop database if exists test_json_array_append; +drop schema test_json_array_append cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_array_insert.out b/contrib/dolphin/expected/json_array_insert.out index a2cf34388..d81b94b91 100644 --- a/contrib/dolphin/expected/json_array_insert.out +++ b/contrib/dolphin/expected/json_array_insert.out @@ -1,7 +1,5 @@ -drop database if exists test_json_array_insert; -NOTICE: database "test_json_array_insert" does not exist, skipping -create database test_json_array_insert dbcompatibility 'B'; -\c test_json_array_insert +create schema test_json_array_insert; +set current_schema to 'test_json_array_insert'; SELECT JSON_ARRAY_INSERT('[1, [2, 3], {"a": [4, 5]}]', '$[0]', 0); json_array_insert ------------------------------- @@ -204,5 +202,5 @@ SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', ' ', 4); ERROR: Invalid JSON path expression. The error is around argument 1. CONTEXT: referenced column: json_array_insert -\c postgres -drop database if exists test_json_array_insert; +drop schema test_json_array_insert cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_arrayagg.out b/contrib/dolphin/expected/json_arrayagg.out index 96dcc7e59..24a28e6a0 100644 --- a/contrib/dolphin/expected/json_arrayagg.out +++ b/contrib/dolphin/expected/json_arrayagg.out @@ -1,7 +1,5 @@ -drop database if exists json_arrayagg_test; -NOTICE: database "json_arrayagg_test" does not exist, skipping -create database json_arrayagg_test dbcompatibility 'B'; -\c json_arrayagg_test +create schema json_arrayagg_test; +set current_schema to 'json_arrayagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -85,5 +83,7 @@ select json_arrayagg(a) from time_table; ["08-22-2020", "10-01-2021", "12-04-2022"] (1 row) -\c postgres -drop database 
json_arrayagg_test; +drop schema json_arrayagg_test cascade; +NOTICE: drop cascades to table city +reset json_arrayagg_test; +ERROR: unrecognized configuration parameter "json_arrayagg_test" diff --git a/contrib/dolphin/expected/json_contains.out b/contrib/dolphin/expected/json_contains.out index 2641cf48c..d435a2637 100644 --- a/contrib/dolphin/expected/json_contains.out +++ b/contrib/dolphin/expected/json_contains.out @@ -1,7 +1,5 @@ -drop database if exists test_json_contains; -NOTICE: database "test_json_contains" does not exist, skipping -create database test_json_contains dbcompatibility 'b'; -\c test_json_contains +create schema test_json_contains; +set current_schema to 'test_json_contains'; select json_contains('1',null); json_contains --------------- @@ -369,5 +367,5 @@ select *, json_contains(target, candidate, path) from json_contains_test; (8 rows) drop table json_contains_test; -\c postgres; -drop database if exists test_json_contains; +drop schema test_json_contains cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_contains_path.out b/contrib/dolphin/expected/json_contains_path.out index 191aa0690..c303c7c19 100644 --- a/contrib/dolphin/expected/json_contains_path.out +++ b/contrib/dolphin/expected/json_contains_path.out @@ -1,7 +1,5 @@ -drop database if exists test_json_contains_path; -NOTICE: database "test_json_contains_path" does not exist, skipping -create database test_json_contains_path dbcompatibility 'b'; -\c test_json_contains_path +create schema test_json_contains_path; +set current_schema to 'test_json_contains_path'; select json_contains_path(null,'one','$[0]'); json_contains_path -------------------- @@ -178,5 +176,5 @@ select *, json_contains_path(target, mode, '$.a.d', '$.c.d') from json_contains_ (2 rows) drop table json_contains_path_test; -\c postgres; -drop database if exists test_json_contains_path; +drop schema test_json_contains_path cascade; +reset current_schema; diff --git 
a/contrib/dolphin/expected/json_depth.out b/contrib/dolphin/expected/json_depth.out index cb57354e5..8a96092c6 100644 --- a/contrib/dolphin/expected/json_depth.out +++ b/contrib/dolphin/expected/json_depth.out @@ -1,7 +1,5 @@ -drop database if exists test_json_depth; -NOTICE: database "test_json_depth" does not exist, skipping -create database test_json_depth dbcompatibility 'B'; -\c test_json_depth +create schema test_json_depth; +set current_schema to 'test_json_depth'; select json_depth('{}'); json_depth ------------ @@ -142,5 +140,5 @@ select json_depth(data) from test1; (3 rows) drop table test1; -\c postgres -drop database if exists test_json_depth; +drop schema test_json_depth cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_extract.out b/contrib/dolphin/expected/json_extract.out index e3163be1e..787927314 100644 --- a/contrib/dolphin/expected/json_extract.out +++ b/contrib/dolphin/expected/json_extract.out @@ -1,7 +1,5 @@ -drop database if exists test_json_extract; -NOTICE: database "test_json_extract" does not exist, skipping -create database test_json_extract dbcompatibility'B'; -\c test_json_extract +create schema test_json_extract; +set current_schema to 'test_json_extract'; select json_extract('{"a": "lihua"}', '$.a'); json_extract -------------- @@ -109,5 +107,5 @@ select * from test; {"c": true} (5 rows) -\c postgres -drop database if exists test_json_extract; +drop schema test_json_extract cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_insert.out b/contrib/dolphin/expected/json_insert.out index 83549365c..81046931e 100644 --- a/contrib/dolphin/expected/json_insert.out +++ b/contrib/dolphin/expected/json_insert.out @@ -1,7 +1,5 @@ -drop database if exists test_json_insert; -NOTICE: database "test_json_insert" does not exist, skipping -create database test_json_insert dbcompatibility'B'; -\c test_json_insert +create schema test_json_insert; +set current_schema to 'test_json_insert'; -- test for 
basic functionality of json_replace select JSON_INSERT('{"a": 43}', '$.b', 55); json_insert @@ -206,5 +204,5 @@ select * from test; {"a": 43, "b": [{"c": true}, "Test"]} (5 rows) -\c postgres -drop database if exists test_json_insert; +drop schema test_json_insert cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_keys.out b/contrib/dolphin/expected/json_keys.out index 4bdb620ab..7209576b4 100644 --- a/contrib/dolphin/expected/json_keys.out +++ b/contrib/dolphin/expected/json_keys.out @@ -1,7 +1,5 @@ -drop database if exists test_json_keys; -NOTICE: database "test_json_keys" does not exist, skipping -create database test_json_keys dbcompatibility'B'; -\c test_json_keys +create schema test_json_keys; +set current_schema to 'test_json_keys'; SELECT JSON_KEYS('{"a":"t1"}'); json_keys ----------- @@ -139,5 +137,6 @@ select name from student; ["a", "b"] (1 row) -\c postgres -drop database if exists test_json_keys; +drop schema test_json_keys cascade; +NOTICE: drop cascades to table student +reset current_schema; diff --git a/contrib/dolphin/expected/json_length.out b/contrib/dolphin/expected/json_length.out index 4c31455c7..05cec8dbb 100644 --- a/contrib/dolphin/expected/json_length.out +++ b/contrib/dolphin/expected/json_length.out @@ -1,7 +1,5 @@ -drop database if exists test_json_length; -NOTICE: database "test_json_length" does not exist, skipping -create database test_json_length dbcompatibility 'B'; -\c test_json_length +create schema test_json_length; +set current_schema to 'test_json_length'; select json_length(NULL); json_length ------------- @@ -177,5 +175,5 @@ insert into test values (2 rows) drop table test; -\c postgres; -drop database if exists test_json_length; +drop schema test_json_length cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_merge_patch.out b/contrib/dolphin/expected/json_merge_patch.out index 1da85515f..a94d75b24 100644 --- a/contrib/dolphin/expected/json_merge_patch.out +++ 
b/contrib/dolphin/expected/json_merge_patch.out @@ -1,7 +1,5 @@ -drop database if exists test_json_merge_patch; -NOTICE: database "test_json_merge_patch" does not exist, skipping -create database test_json_merge_patch dbcompatibility 'B'; -\c test_json_merge_patch +create schema test_json_merge_patch; +set current_schema to 'test_json_merge_patch'; select json_merge_patch(NULL); ERROR: Incorrect parameter count CONTEXT: referenced column: json_merge_patch @@ -340,5 +338,8 @@ insert into test1 values {"a": [1, 2], "colin": "huawei", "colinew": "handsome"} | {"1": "jks"} (1 row) -\c postgres; -drop database if exists test_json_merge_patch; +drop schema test_json_merge_patch cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test +drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_merge_preserve.out b/contrib/dolphin/expected/json_merge_preserve.out index ed903d51d..3bc9e63fe 100644 --- a/contrib/dolphin/expected/json_merge_preserve.out +++ b/contrib/dolphin/expected/json_merge_preserve.out @@ -1,7 +1,5 @@ -drop database if exists test_json_merge_preserve; -NOTICE: database "test_json_merge_preserve" does not exist, skipping -create database test_json_merge_preserve dbcompatibility 'B'; -\c test_json_merge_preserve +create schema test_json_merge_preserve; +set current_schema to 'test_json_merge_preserve'; select json_merge_preserve(NULL); ERROR: Incorrect parameter count CONTEXT: referenced column: json_merge_preserve @@ -340,5 +338,8 @@ insert into test1 values {"a": [1, 2], "colin": [{"a": "abc"}, "bcd", "huawei"], "colinew": "handsome"} | [{"a": "abc"}, "bcd", {"1": "jks"}] (1 row) -\c postgres; -drop database if exists test_json_merge_preserve; +drop schema test_json_merge_preserve cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test +drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_object.out 
b/contrib/dolphin/expected/json_object.out index 302a4acc5..0927e24dc 100644 --- a/contrib/dolphin/expected/json_object.out +++ b/contrib/dolphin/expected/json_object.out @@ -1,7 +1,5 @@ -drop database if exists test_json_object; -NOTICE: database "test_json_object" does not exist, skipping -create database test_json_object dbcompatibility 'B'; -\c test_json_object +create schema test_json_object; +set current_schema to 'test_json_object'; -- test for b_compatibility_mode = false select json_object('{a,1,b,2,3,NULL,"d e f","a b c"}'); json_object @@ -364,5 +362,8 @@ select json_object('{a,b,"a b c"}', '{a,1,1}'); {"a" : "a", "b" : "1", "a b c" : "1"} (1 row) -\c postgres -drop database if exists test_json_object; +drop schema test_json_object cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table tab_json1 +drop cascades to table info1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_objectagg.out b/contrib/dolphin/expected/json_objectagg.out index b676b21fc..c070f9c30 100644 --- a/contrib/dolphin/expected/json_objectagg.out +++ b/contrib/dolphin/expected/json_objectagg.out @@ -1,7 +1,5 @@ -drop database if exists json_objectagg_test; -NOTICE: database "json_objectagg_test" does not exist, skipping -create database json_objectagg_test dbcompatibility 'B'; -\c json_objectagg_test +create schema json_objectagg_test; +set current_schema to 'json_objectagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -64,5 +62,6 @@ select json_objectagg(b, a) from time_table; {"1": "08-22-2020", "2": "10-01-2021", "3": "12-04-2022"} (1 row) -\c postgres -drop database json_objectagg_test; +drop schema json_objectagg_test cascade; +NOTICE: drop cascades to table city +reset current_schema; diff --git a/contrib/dolphin/expected/json_operator.out b/contrib/dolphin/expected/json_operator.out index dbd6fe019..b69300b04 
100644 --- a/contrib/dolphin/expected/json_operator.out +++ b/contrib/dolphin/expected/json_operator.out @@ -1,7 +1,5 @@ -drop database if exists test_operator; -NOTICE: database "test_operator" does not exist, skipping -create database test_operator dbcompatibility 'B'; -\c test_operator +create schema test_operator; +set current_schema to 'test_operator'; drop table if exists test1; NOTICE: table "test1" does not exist, skipping create table test1(data json); @@ -152,5 +150,8 @@ select data->>'c' from test2; susan (3 rows) -\c postgres -drop database test_operator; +drop schema test_operator cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test1 +drop cascades to table test2 +reset current_schema; diff --git a/contrib/dolphin/expected/json_pretty.out b/contrib/dolphin/expected/json_pretty.out index f65d4db78..41f48dbea 100644 --- a/contrib/dolphin/expected/json_pretty.out +++ b/contrib/dolphin/expected/json_pretty.out @@ -1,7 +1,5 @@ -drop database if exists test_json_pretty; -NOTICE: database "test_json_pretty" does not exist, skipping -create database test_json_pretty dbcompatibility'B'; -\c test_json_pretty +create schema test_json_pretty; +set current_schema to 'test_json_pretty'; -- test for basic functionality of json_replace select JSON_PRETTY('{"a": 43}'); json_pretty @@ -342,5 +340,5 @@ select * from test; } (5 rows) -\c postgres -drop database test_json_pretty; +drop schema test_json_pretty cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_quote.out b/contrib/dolphin/expected/json_quote.out index 390a6146f..30302c643 100644 --- a/contrib/dolphin/expected/json_quote.out +++ b/contrib/dolphin/expected/json_quote.out @@ -1,7 +1,5 @@ -drop database if exists test_json_quote; -NOTICE: database "test_json_quote" does not exist, skipping -create database test_json_quote dbcompatibility'B'; -\c test_json_quote +create schema test_json_quote; +set current_schema to 'test_json_quote'; select 
json_quote(E'a\tb'); json_quote ------------ @@ -82,5 +80,5 @@ select name from student; (1 row) drop table student; -\c postgres -drop database test_json_quote; +drop schema test_json_quote cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_remove.out b/contrib/dolphin/expected/json_remove.out index 5230c7574..eed2e5b5a 100644 --- a/contrib/dolphin/expected/json_remove.out +++ b/contrib/dolphin/expected/json_remove.out @@ -1,7 +1,5 @@ -drop database if exists test_json_remove; -NOTICE: database "test_json_remove" does not exist, skipping -create database test_json_remove dbcompatibility'B'; -\c test_json_remove +create schema test_json_remove; +set current_schema to 'test_json_remove'; SELECT JSON_REMOVE('[0, 1, 2, [3, 4]]', '$[0]', '$[2]'); json_remove ------------- @@ -114,5 +112,8 @@ select * from tab_json1; {"x": {"a": 3}, "y": 2} (1 row) -\c postgres -drop database if exists test_json_remove; +drop schema test_json_remove cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table tab_json1 +drop cascades to table info1 +reset current_schema; diff --git a/contrib/dolphin/expected/json_replace.out b/contrib/dolphin/expected/json_replace.out index b6dc21076..6aaba5f09 100644 --- a/contrib/dolphin/expected/json_replace.out +++ b/contrib/dolphin/expected/json_replace.out @@ -1,7 +1,5 @@ -drop database if exists test_json_replace; -NOTICE: database "test_json_replace" does not exist, skipping -create database test_json_replace dbcompatibility 'B'; -\c test_json_replace +create schema test_json_replace; +set current_schema to 'test_json_replace'; -- test for basic functionality of json_replace SELECT JSON_REPLACE('{"a": 1, "b": 2, "c": 3}', '$.b', 9); json_replace @@ -243,5 +241,5 @@ CONTEXT: referenced column: json_replace SELECT JSON_REPLACE('x',2,2); ERROR: Invalid JSON text in argument 1 to function json_replace. 
CONTEXT: referenced column: json_replace -\c postgres -drop database if exists test_json_replace; +drop schema test_json_replace cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_search.out b/contrib/dolphin/expected/json_search.out index 365cb8463..6eab60d10 100644 --- a/contrib/dolphin/expected/json_search.out +++ b/contrib/dolphin/expected/json_search.out @@ -1,7 +1,5 @@ -drop database if exists test_json_search; -NOTICE: database "test_json_search" does not exist, skipping -create database test_json_search dbcompatibility'B'; -\c test_json_search +create schema test_json_search; +set current_schema to 'test_json_search'; select json_search('null','one','null','&','$'); json_search ------------- @@ -810,5 +808,6 @@ select * from json_search_test; (6 rows) drop table json_search_test; -\c postgres; -drop database if exists test_json_search; +drop schema test_json_search cascade; +NOTICE: drop cascades to table c +reset current_schema; diff --git a/contrib/dolphin/expected/json_set.out b/contrib/dolphin/expected/json_set.out index 325d194c1..497aa4ba5 100644 --- a/contrib/dolphin/expected/json_set.out +++ b/contrib/dolphin/expected/json_set.out @@ -1,7 +1,5 @@ -drop database if exists test_json_set; -NOTICE: database "test_json_set" does not exist, skipping -create database test_json_set dbcompatibility 'B'; -\c test_json_set +create schema test_json_set; +set current_schema to 'test_json_set'; select json_set('{"1":2}','$."1"',6); json_set ---------- @@ -108,5 +106,5 @@ select name from dataa; (1 row) drop table dataa; -\c postgres -drop database if exists test_json_set; +drop schema test_json_set cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_storage_size.out b/contrib/dolphin/expected/json_storage_size.out index 05a7e01a2..4206f4d65 100644 --- a/contrib/dolphin/expected/json_storage_size.out +++ b/contrib/dolphin/expected/json_storage_size.out @@ -1,7 +1,5 @@ -drop database if exists test_json_storage_size; 
-NOTICE: database "test_json_storage_size" does not exist, skipping -create database test_json_storage_size dbcompatibility'B'; -\c test_json_storage_size +create schema test_json_storage_size; +set current_schema to 'test_json_storage_size'; set enable_set_variable_b_format to on; SELECT JSON_STORAGE_SIZE('0'); json_storage_size @@ -141,5 +139,6 @@ FROM SELECT JSON_STORAGE_SIZE('{0,1}'); ERROR: Invalid JSON text in argument 1 to function json_storage_size. CONTEXT: referenced column: json_storage_size -\c postgres -drop database test_json_storage_size +drop schema test_json_storage_size cascade; +NOTICE: drop cascades to table test_json_storage_size +reset current_schema; diff --git a/contrib/dolphin/expected/json_type.out b/contrib/dolphin/expected/json_type.out index 139b025c7..8b4c9cd12 100644 --- a/contrib/dolphin/expected/json_type.out +++ b/contrib/dolphin/expected/json_type.out @@ -1,7 +1,5 @@ -drop database if exists test_json_type; -NOTICE: database "test_json_type" does not exist, skipping -create database test_json_type dbcompatibility'B'; -\c test_json_type +create schema test_json_type; +set current_schema to 'test_json_type'; ---string select json_type('"aa"'); json_type @@ -182,5 +180,6 @@ select json_type(t1) from test_type; object (1 row) -\c postgres -drop database test_json_type; +drop schema test_json_type cascade; +NOTICE: drop cascades to table test_type +reset current_schema; diff --git a/contrib/dolphin/expected/json_unquote.out b/contrib/dolphin/expected/json_unquote.out index 50f6a8ae0..06a0058b9 100644 --- a/contrib/dolphin/expected/json_unquote.out +++ b/contrib/dolphin/expected/json_unquote.out @@ -1,7 +1,5 @@ -drop database if exists test_json_unquote; -NOTICE: database "test_json_unquote" does not exist, skipping -create database test_json_unquote dbcompatibility 'B'; -\c test_json_unquote +create schema test_json_unquote; +set current_schema to 'test_json_unquote'; select json_unquote('"abc"'); json_unquote -------------- @@ -150,5 
+148,5 @@ select name from data; (1 row) drop table data; -\c postgres -drop database if exists test_json_unquote; +drop schema test_json_unquote cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/json_valid.out b/contrib/dolphin/expected/json_valid.out index d4bfb757d..bc5d6d038 100644 --- a/contrib/dolphin/expected/json_valid.out +++ b/contrib/dolphin/expected/json_valid.out @@ -1,7 +1,5 @@ -drop database if exists test_json_valid; -NOTICE: database "test_json_valid" does not exist, skipping -create database test_json_valid dbcompatibility 'b'; -\c test_json_valid +create schema test_json_valid; +set current_schema to 'test_json_valid'; select json_valid(NULL); json_valid ------------ @@ -663,5 +661,5 @@ select target, json_valid(target) from json_valid_test; (3 rows) drop table json_valid_test; -\c postgres -drop database if exists test_json_valid; +drop schema test_json_valid cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out index 5102eb672..46cbb6e96 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_invalid_input.out @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_invalid_input_test dbcompatibility 'B'; -\c sql_ignore_invalid_input_test; +create schema sql_ignore_invalid_input_test; +set current_schema to 'sql_ignore_invalid_input_test'; set timezone to 'PRC'; -- type: tinyint drop table if exists t_tinyint; @@ -1143,5 +1143,35 @@ show timezone; PST8PDT (1 row) -\c postgres -drop database if exists sql_ignore_invalid_input_test; +drop schema sql_ignore_invalid_input_test cascade; +NOTICE: drop cascades to 29 other objects +DETAIL: drop cascades to table t_tinyint +drop cascades to table t_tinyint_unsigned +drop cascades to table t_smallint +drop cascades to table t_smallint_unsigned +drop cascades to table t_int +drop cascades to table t_int_unsigned +drop cascades to table t_bigint +drop cascades to table t_bigint_unsigned +drop cascades to table t_float4 +drop cascades to table t_float8 +drop cascades to table t_numeric +drop cascades to table t_date +drop cascades to table t_time +drop cascades to table t_timestamp +drop cascades to table t_timestamptz +drop cascades to table t_timetz +drop cascades to table t_interval +drop cascades to table t_tinterval +drop cascades to table t_smalldatetime +drop cascades to table t_uuid +drop cascades to table t_point +drop cascades to table t_path +drop cascades to table t_polygon +drop cascades to table t_circle +drop cascades to table t_lseg +drop cascades to table t_box +drop cascades to table t_json +drop cascades to table t_jsonb +drop cascades to table t_bit +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out index 85fb47809..19cea54a3 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_no_matched_partition.out @@ -1,6 +1,6 @@ -- test for ignore error of no partition matched -create database 
sql_ignore_no_matched_partition_test dbcompatibility 'B'; -\c sql_ignore_no_matched_partition_test; +create schema sql_ignore_no_matched_partition_test; +set current_schema to 'sql_ignore_no_matched_partition_test'; -- sqlbypass set enable_opfusion = on; set enable_partition_opfusion = on; @@ -308,5 +308,6 @@ set enable_opfusion = on; set enable_partition_opfusion = off; drop table t_ignore; drop table t_from; -\c postgres -drop database if exists sql_ignore_no_matched_partition_test; +drop schema sql_ignore_no_matched_partition_test cascade; +NOTICE: drop cascades to table ignore_range_range +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out index 9a9b3f84b..d9006d017 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_not_null_constraints.out @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_not_null_test dbcompatibility 'B'; -\c sql_ignore_not_null_test; +create schema sql_ignore_not_null_test; +set current_schema to 'sql_ignore_not_null_test'; drop table if exists t_ignore; NOTICE: table "t_ignore" does not exist, skipping create table t_ignore(col1 int, col2 int not null, col3 varchar not null); @@ -1482,5 +1482,39 @@ select * from t_ignore; (2 rows) -- restore context -\c postgres -drop database if exists sql_ignore_not_null_test; \ No newline at end of file +drop schema sql_ignore_not_null_test cascade; +NOTICE: drop cascades to 33 other objects +DETAIL: drop cascades to table t_from +drop cascades to table t_timestamp +drop cascades to table t_timetz +drop cascades to table t_time +drop cascades to table t_interval +drop cascades to table t_tinterval +drop cascades to table t_smalldatetime +drop cascades to table t_date +drop cascades to table t_uuid +drop cascades to table t_name +drop cascades to table t_point +drop cascades to table t_path +drop cascades to table t_polygon +drop cascades to table t_circle +drop cascades to table t_box +drop cascades to table t_json +drop cascades to table t_jsonb +drop cascades to table t_bit +drop cascades to table t_tinyint +drop cascades to table t_smallint +drop cascades to table t_int +drop cascades to table t_bigint +drop cascades to table t_float +drop cascades to table t_float8 +drop cascades to table t_numeric +drop cascades to table t_serial +drop cascades to table t_bool +drop cascades to table t_charn +drop cascades to table t_varcharn +drop cascades to table t_text +drop cascades to table t_not_null_key_partition +drop cascades to table ignore_range_range +drop cascades to table t_ignore +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out index 23e35c35c..8863bf8fe 100644 --- 
a/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_type_transform.out @@ -1,5 +1,5 @@ -create database sql_ignore_type_transform_test dbcompatibility 'B'; -\c sql_ignore_type_transform_test; +create schema sql_ignore_type_transform_test; +set current_schema to 'sql_ignore_type_transform_test'; -- test for tinyint drop table if exists t; NOTICE: table "t" does not exist, skipping @@ -788,5 +788,20 @@ select * from t_nvarchar2; 123456789123456789 (3 rows) -\c postgres -drop database if exists sql_ignore_type_transform_test; +drop schema sql_ignore_type_transform_test cascade; +NOTICE: drop cascades to 14 other objects +DETAIL: drop cascades to table t +drop cascades to table t_tinyint +drop cascades to table t_smallint +drop cascades to table t_int +drop cascades to table t_bigint +drop cascades to table t_numeric +drop cascades to table t_float4 +drop cascades to table t_char +drop cascades to table t_varchar +drop cascades to table t_nchar +drop cascades to table t_character +drop cascades to table t_varchar2 +drop cascades to table t_nvarchar2 +drop cascades to table t_text +reset current_schema; diff --git a/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out b/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out index 276cec58d..e0cad03a6 100644 --- a/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out +++ b/contrib/dolphin/expected/keyword_ignore_test/ignore_unique_constraints.out @@ -1,5 +1,5 @@ -create database sql_ignore_unique_test dbcompatibility 'B'; -\c sql_ignore_unique_test; +create schema sql_ignore_unique_test; +set current_schema to 'sql_ignore_unique_test'; drop table if exists t_ignore; NOTICE: table "t_ignore" does not exist, skipping create table t_ignore(col1 int, col2 int unique, col3 int unique); @@ -365,5 +365,10 @@ select * from t_ignore; 2 (2 rows) -\c postgres -drop database if exists 
sql_ignore_unique_test; +drop schema sql_ignore_unique_test cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table t_unique_upsert +drop cascades to table t_unique_key_partition +drop cascades to table ignore_range_range +drop cascades to table t_ignore +reset current_schema; diff --git a/contrib/dolphin/expected/kill.out b/contrib/dolphin/expected/kill.out index e389eb038..c41bcae03 100644 --- a/contrib/dolphin/expected/kill.out +++ b/contrib/dolphin/expected/kill.out @@ -1,7 +1,5 @@ -drop database if exists test_kill; -NOTICE: database "test_kill" does not exist, skipping -create database test_kill dbcompatibility 'b'; -\c test_kill +create schema test_kill; +set current_schema to 'test_kill'; kill query (select sessionid from pg_stat_activity where application_name = 'JobScheduler'); result -------- @@ -20,5 +18,5 @@ kill (select sessionid from pg_stat_activity where application_name = 'Percentil t (1 row) -\c postgres -drop database if exists test_kill; +drop schema test_kill cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/like_default_test.out b/contrib/dolphin/expected/like_default_test.out index b29149f91..432739093 100644 --- a/contrib/dolphin/expected/like_default_test.out +++ b/contrib/dolphin/expected/like_default_test.out @@ -1,13 +1,9 @@ --- b compatibility case -drop database if exists like_default_test; -NOTICE: database "like_default_test" does not exist, skipping --- create database like_default_test dbcompatibility 'b'; -create database like_default_test with DBCOMPATIBILITY = 'B'; -\c like_default_test +create schema like_default_test; +set current_schema to 'like_default_test'; create table test_nv (name national varchar(10)); \d test_nv - Table "public.test_nv" + Table "like_default_test.test_nv" Column | Type | Modifiers --------+---------------+----------- name | nvarchar2(10) | @@ -15,7 +11,7 @@ create table test_nv (name national varchar(10)); drop table if exists test_nv; create table 
test_nv (id int, name national varchar(10)); \d test_nv - Table "public.test_nv" + Table "like_default_test.test_nv" Column | Type | Modifiers --------+---------------+----------- id | integer | @@ -24,7 +20,7 @@ create table test_nv (id int, name national varchar(10)); drop table if exists test_nv; create table test_nv (id int, name nvarchar(10)); \d test_nv - Table "public.test_nv" + Table "like_default_test.test_nv" Column | Type | Modifiers --------+---------------+----------- id | integer | @@ -45,7 +41,7 @@ partition by range(id) ); create table test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -53,7 +49,7 @@ create table test_non_like1 (like test_non); create table test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -61,7 +57,7 @@ create table test_non_like2 like test_non; create table test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -69,7 +65,7 @@ create table test_part_like1 (like test_part); create table test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -81,7 +77,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table if not exists test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -89,7 +85,7 @@ create 
table if not exists test_non_like1 (like test_non); create table if not exists test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -97,7 +93,7 @@ create table if not exists test_non_like2 like test_non; create table if not exists test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -105,7 +101,7 @@ create table if not exists test_part_like1 (like test_part); create table if not exists test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -117,7 +113,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -125,7 +121,7 @@ create table test_non_like1 (like test_non including indexes); create table test_non_like2 like test_non including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -133,7 +129,7 @@ create table test_non_like2 like test_non including indexes; create table test_part_like1 (like test_part including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -141,7 +137,7 @@ create table test_part_like1 (like test_part including 
indexes); create table test_part_like2 like test_part including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -159,7 +155,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -169,7 +165,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -189,7 +185,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -199,7 +195,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -213,7 +209,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non including all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -221,7 +217,7 @@ create table test_non_like1 (like test_non including all); create table test_non_like2 like test_non including all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -229,7 +225,7 @@ create table test_non_like2 like test_non including all; create table test_part_like1 (like test_part including all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -239,7 +235,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -251,7 +247,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -259,7 +255,7 @@ create table test_non_like1 (like test_non including all excluding indexes); create table test_non_like2 like test_non including all excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -267,7 +263,7 @@ create table test_non_like2 like test_non including all excluding indexes; create table test_part_like1 (like test_part including all excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -277,7 +273,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -289,7 +285,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -297,7 +293,7 @@ create table test_non_like1 (like test_non including all excluding partition); create table test_non_like2 like test_non including all excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -305,7 +301,7 @@ create table test_non_like2 like test_non including all excluding partition; create table test_part_like1 (like test_part including all excluding partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -313,7 +309,7 @@ create table test_part_like1 (like test_part including all excluding partition); create table test_part_like2 like test_part including all excluding partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -323,7 +319,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type 
| Modifiers --------+-----------------------+----------- id | integer | @@ -331,7 +327,7 @@ create table test_non_like1 (like test_non including all excluding partition exc create table test_non_like2 like test_non including all excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -339,7 +335,7 @@ create table test_non_like2 like test_non including all excluding partition excl create table test_part_like1 (like test_part including all excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -347,7 +343,7 @@ create table test_part_like1 (like test_part including all excluding partition e create table test_part_like2 like test_part including all excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -357,7 +353,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -365,7 +361,7 @@ create table test_non_like1 (like test_non excluding indexes); create table test_non_like2 like test_non excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -373,7 +369,7 @@ create table test_non_like2 like test_non excluding indexes; create table test_part_like1 (like 
test_part excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -381,7 +377,7 @@ create table test_part_like1 (like test_part excluding indexes); create table test_part_like2 like test_part excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -393,7 +389,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -401,7 +397,7 @@ create table test_non_like1 (like test_non excluding partition); create table test_non_like2 like test_non excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -409,7 +405,7 @@ create table test_non_like2 like test_non excluding partition; create table test_part_like1 (like test_part excluding partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -417,7 +413,7 @@ create table test_part_like1 (like test_part excluding partition); create table test_part_like2 like test_part excluding partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -427,7 +423,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table 
test_non_like1 (like test_non excluding reloptions); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -435,7 +431,7 @@ create table test_non_like1 (like test_non excluding reloptions); create table test_non_like2 like test_non excluding reloptions; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -443,7 +439,7 @@ create table test_non_like2 like test_non excluding reloptions; create table test_part_like1 (like test_part excluding reloptions); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -451,7 +447,7 @@ create table test_part_like1 (like test_part excluding reloptions); create table test_part_like2 like test_part excluding reloptions; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -463,7 +459,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -471,7 +467,7 @@ create table test_non_like1 (like test_non excluding partition excluding indexes create table test_non_like2 like test_non excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -479,7 +475,7 @@ create table 
test_non_like2 like test_non excluding partition excluding indexes; create table test_part_like1 (like test_part excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -487,7 +483,7 @@ create table test_part_like1 (like test_part excluding partition excluding index create table test_part_like2 like test_part excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -497,7 +493,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -505,7 +501,7 @@ create table test_non_like1 (like test_non excluding all); create table test_non_like2 like test_non excluding all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -513,7 +509,7 @@ create table test_non_like2 like test_non excluding all; create table test_part_like1 (like test_part excluding all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -521,7 +517,7 @@ create table test_part_like1 (like test_part excluding all); create table test_part_like2 like test_part excluding all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ 
-537,7 +533,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -547,7 +543,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part excluding all including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -561,7 +557,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non excluding all including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -569,7 +565,7 @@ create table test_non_like1 (like test_non excluding all including indexes); create table test_non_like2 like test_non excluding all including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -577,7 +573,7 @@ create table test_non_like2 like test_non excluding all including indexes; create table test_part_like1 (like test_part excluding all including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -585,7 +581,7 @@ create table test_part_like1 (like test_part excluding all including indexes); create table test_part_like2 like test_part excluding all including indexes; \d test_part_like2 - Table 
"public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -601,7 +597,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -611,7 +607,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part excluding all including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -628,7 +624,7 @@ create index test_part_id_idx on test_part(id); create table test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -636,7 +632,7 @@ create table test_non_like1 (like test_non); create table test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -646,7 +642,7 @@ Indexes: create table test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -654,7 +650,7 @@ create table test_part_like1 (like test_part); create table test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers 
--------+-----------------------+----------- id | integer | @@ -668,7 +664,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table if not exists test_non_like1 (like test_non); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -676,7 +672,7 @@ create table if not exists test_non_like1 (like test_non); create table if not exists test_non_like2 like test_non; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -686,7 +682,7 @@ Indexes: create table if not exists test_part_like1 (like test_part); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -694,7 +690,7 @@ create table if not exists test_part_like1 (like test_part); create table if not exists test_part_like2 like test_part; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -708,7 +704,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -718,7 +714,7 @@ Indexes: create table test_non_like2 like test_non including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -731,7 +727,7 @@ ERROR: non-partitioned table does not support global 
partitioned indexes \d test_part_like1 create table test_part_like2 like test_part including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -752,7 +748,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -762,7 +758,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) create table test_part_like2 like test_part including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -784,7 +780,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -796,7 +792,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -812,7 +808,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non including all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -822,7 +818,7 @@ Indexes: create table test_non_like2 like test_non including all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -832,7 +828,7 @@ Indexes: create table test_part_like1 (like test_part including all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -844,7 +840,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -858,7 +854,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -866,7 +862,7 @@ create table test_non_like1 (like test_non including all excluding indexes); create table test_non_like2 like test_non including all excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -874,7 +870,7 @@ create table test_non_like2 like test_non including all excluding indexes; create table test_part_like1 (like test_part including all excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -884,7 +880,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part including all excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -896,7 +892,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non including all excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -906,7 +902,7 @@ Indexes: create table test_non_like2 like test_non including all excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -926,7 +922,7 @@ NOTICE: table "test_part_like2" does not exist, skipping create table test_non_like1 (like test_non including all excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -934,7 +930,7 @@ create table test_non_like1 (like test_non including all excluding partition exc create table test_non_like2 like test_non including all excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -942,7 +938,7 @@ create table test_non_like2 like test_non including all excluding partition excl create table test_part_like1 (like test_part including all excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- 
id | integer | @@ -950,7 +946,7 @@ create table test_part_like1 (like test_part including all excluding partition e create table test_part_like2 like test_part including all excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -960,7 +956,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -968,7 +964,7 @@ create table test_non_like1 (like test_non excluding indexes); create table test_non_like2 like test_non excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -976,7 +972,7 @@ create table test_non_like2 like test_non excluding indexes; create table test_part_like1 (like test_part excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -984,7 +980,7 @@ create table test_part_like1 (like test_part excluding indexes); create table test_part_like2 like test_part excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -996,7 +992,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding partition); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | 
Modifiers --------+-----------------------+----------- id | integer | @@ -1004,7 +1000,7 @@ create table test_non_like1 (like test_non excluding partition); create table test_non_like2 like test_non excluding partition; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1014,7 +1010,7 @@ Indexes: create table test_part_like1 (like test_part excluding partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1028,7 +1024,7 @@ NOTICE: table "test_part_like2" does not exist, skipping create table test_non_like1 (like test_non excluding partition excluding indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1036,7 +1032,7 @@ create table test_non_like1 (like test_non excluding partition excluding indexes create table test_non_like2 like test_non excluding partition excluding indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1044,7 +1040,7 @@ create table test_non_like2 like test_non excluding partition excluding indexes; create table test_part_like1 (like test_part excluding partition excluding indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1052,7 +1048,7 @@ create table test_part_like1 (like test_part excluding partition excluding index create table test_part_like2 like test_part excluding partition excluding indexes; \d test_part_like2 - Table "public.test_part_like2" + Table 
"like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1062,7 +1058,7 @@ drop table if exists test_non_like1, test_non_like2, test_part_like1, test_part_ create table test_non_like1 (like test_non excluding all); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1070,7 +1066,7 @@ create table test_non_like1 (like test_non excluding all); create table test_non_like2 like test_non excluding all; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1078,7 +1074,7 @@ create table test_non_like2 like test_non excluding all; create table test_part_like1 (like test_part excluding all); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1086,7 +1082,7 @@ create table test_part_like1 (like test_part excluding all); create table test_part_like2 like test_part excluding all; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1102,7 +1098,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1112,7 +1108,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part excluding all including partition; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1126,7 +1122,7 @@ NOTICE: table "test_non_like2" does not exist, skipping create table test_non_like1 (like test_non excluding all including indexes); \d test_non_like1 - Table "public.test_non_like1" + Table "like_default_test.test_non_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1136,7 +1132,7 @@ Indexes: create table test_non_like2 like test_non excluding all including indexes; \d test_non_like2 - Table "public.test_non_like2" + Table "like_default_test.test_non_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1162,7 +1158,7 @@ ERROR: could not specify "INCLUDING PARTITION" for non-partitioned-table relati \d test_non_like2 create table test_part_like1 (like test_part excluding all including partition including indexes); \d test_part_like1 - Table "public.test_part_like1" + Table "like_default_test.test_part_like1" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1174,7 +1170,7 @@ Number of partitions: 5 (View pg_partition to check each partition range.) 
create table test_part_like2 like test_part excluding all including partition including indexes; \d test_part_like2 - Table "public.test_part_like2" + Table "like_default_test.test_part_like2" Column | Type | Modifiers --------+-----------------------+----------- id | integer | @@ -1262,11 +1258,6 @@ select * from test_insert; (15 rows) -\c postgres -drop database if exists like_default_test; - - - - - - +drop schema like_default_test cascade; +NOTICE: drop cascades to table test_insert +reset current_schema; diff --git a/contrib/dolphin/expected/mysqlmode_fullgroup.out b/contrib/dolphin/expected/mysqlmode_fullgroup.out index e611c2559..85024355b 100644 --- a/contrib/dolphin/expected/mysqlmode_fullgroup.out +++ b/contrib/dolphin/expected/mysqlmode_fullgroup.out @@ -1,5 +1,5 @@ -CREATE DATABASE sql_mode_full_group dbcompatibility 'B'; -\c sql_mode_full_group; +create schema sql_mode_full_group; +set current_schema to 'sql_mode_full_group'; create table test_group(a int, b int, c int, d int); create table test_group1(a int, b int, c int, d int); insert into test_group values(1,2,3,4); @@ -48,5 +48,8 @@ select t.a, (select sum(b) from test_group i where i.b = t.b ) from test_group t 2 | 8 (3 rows) -\c contrib_regression; -drop DATABASE if exists sql_mode_full_group; +drop schema sql_mode_full_group cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test_group +drop cascades to table test_group1 +reset current_schema; diff --git a/contrib/dolphin/expected/mysqlmode_strict.out b/contrib/dolphin/expected/mysqlmode_strict.out index 0daf8e196..1dfc6c51f 100644 --- a/contrib/dolphin/expected/mysqlmode_strict.out +++ b/contrib/dolphin/expected/mysqlmode_strict.out @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict; -NOTICE: database "sql_mode_strict" does not exist, skipping -CREATE DATABASE sql_mode_strict dbcompatibility 'B'; -\c sql_mode_strict; +create schema sql_mode_strict; +set current_schema to 'sql_mode_strict'; set 
dolphin.sql_mode = ''; create table test_tint(a tinyint); create table test_sint(a smallint); @@ -2183,5 +2181,57 @@ DETAIL: Failing row contains (null, null). insert into test_notnull_numeric_strict(b) values(null); ERROR: null value in column "b" violates not-null constraint DETAIL: Failing row contains (null, null). -\c contrib_regression; -drop DATABASE if exists sql_mode_strict; +drop schema sql_mode_strict cascade; +NOTICE: drop cascades to 51 other objects +DETAIL: drop cascades to table test_tint +drop cascades to table test_sint +drop cascades to table test_int +drop cascades to table test_bint +drop cascades to table test_float4 +drop cascades to table test_float8 +drop cascades to table test_char +drop cascades to table test_varchar7 +drop cascades to table test_tint1 +drop cascades to table test_sint1 +drop cascades to table test_int1 +drop cascades to table test_bint1 +drop cascades to table test_char1 +drop cascades to table test_varchar7_1 +drop cascades to table test_notnull_tint +drop cascades to table test_notnull_sint +drop cascades to table test_notnull_int +drop cascades to table test_notnull_bint +drop cascades to table test_notnull_char +drop cascades to table test_notnull_varchar +drop cascades to table test_notnull_clob +drop cascades to table test_notnull_text +drop cascades to table test_notnull_real +drop cascades to table test_notnull_double +drop cascades to table test_notnull_numeric +drop cascades to table test_multi_default +drop cascades to table test_tint_strict +drop cascades to table test_sint_strict +drop cascades to table test_int_strict +drop cascades to table test_bint_strict +drop cascades to table test_float4_strict +drop cascades to table test_float8_strict +drop cascades to table test_char_strict +drop cascades to table test_varchar7_strict +drop cascades to table test_tint_strict1 +drop cascades to table test_sint_strict1 +drop cascades to table test_int_strict1 +drop cascades to table test_bint_strict1 +drop cascades 
to table test_char_strict1 +drop cascades to table test_varchar7_strict_1 +drop cascades to table test_notnull_tint_strict +drop cascades to table test_notnull_sint_strict +drop cascades to table test_notnull_int_strict +drop cascades to table test_notnull_bint_strict +drop cascades to table test_notnull_char_strict +drop cascades to table test_notnull_varchar_strict +drop cascades to table test_notnull_clob_strict +drop cascades to table test_notnull_text_strict +drop cascades to table test_notnull_real_strict +drop cascades to table test_notnull_double_strict +drop cascades to table test_notnull_numeric_strict +reset current_schema; diff --git a/contrib/dolphin/expected/mysqlmode_strict2.out b/contrib/dolphin/expected/mysqlmode_strict2.out index f94314099..ff8f9d6d6 100644 --- a/contrib/dolphin/expected/mysqlmode_strict2.out +++ b/contrib/dolphin/expected/mysqlmode_strict2.out @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict2; -NOTICE: database "sql_mode_strict2" does not exist, skipping -CREATE DATABASE sql_mode_strict2 dbcompatibility 'B'; -\c sql_mode_strict2; +create schema sql_mode_strict2; +set current_schema to 'sql_mode_strict2'; set dolphin.sql_mode = ''; create table test_tint(a tinyint unsigned); create table test_sint(a smallint unsigned); @@ -2174,5 +2172,57 @@ DETAIL: Failing row contains (null, null). insert into test_notnull_numeric_strict(b) values(null); ERROR: null value in column "b" violates not-null constraint DETAIL: Failing row contains (null, null). 
-\c contrib_regression; -drop DATABASE if exists sql_mode_strict2; +drop schema sql_mode_strict2 cascade; +NOTICE: drop cascades to 51 other objects +DETAIL: drop cascades to table test_tint +drop cascades to table test_sint +drop cascades to table test_int +drop cascades to table test_bint +drop cascades to table test_float4 +drop cascades to table test_float8 +drop cascades to table test_char +drop cascades to table test_varchar7 +drop cascades to table test_tint1 +drop cascades to table test_sint1 +drop cascades to table test_int1 +drop cascades to table test_bint1 +drop cascades to table test_char1 +drop cascades to table test_varchar7_1 +drop cascades to table test_notnull_tint +drop cascades to table test_notnull_sint +drop cascades to table test_notnull_int +drop cascades to table test_notnull_bint +drop cascades to table test_notnull_char +drop cascades to table test_notnull_varchar +drop cascades to table test_notnull_clob +drop cascades to table test_notnull_text +drop cascades to table test_notnull_real +drop cascades to table test_notnull_double +drop cascades to table test_notnull_numeric +drop cascades to table test_multi_default +drop cascades to table test_tint_strict +drop cascades to table test_sint_strict +drop cascades to table test_int_strict +drop cascades to table test_bint_strict +drop cascades to table test_float4_strict +drop cascades to table test_float8_strict +drop cascades to table test_char_strict +drop cascades to table test_varchar7_strict +drop cascades to table test_tint_strict1 +drop cascades to table test_sint_strict1 +drop cascades to table test_int_strict1 +drop cascades to table test_bint_strict1 +drop cascades to table test_char_strict1 +drop cascades to table test_varchar7_strict_1 +drop cascades to table test_notnull_tint_strict +drop cascades to table test_notnull_sint_strict +drop cascades to table test_notnull_int_strict +drop cascades to table test_notnull_bint_strict +drop cascades to table test_notnull_char_strict 
+drop cascades to table test_notnull_varchar_strict +drop cascades to table test_notnull_clob_strict +drop cascades to table test_notnull_text_strict +drop cascades to table test_notnull_real_strict +drop cascades to table test_notnull_double_strict +drop cascades to table test_notnull_numeric_strict +reset current_schema; diff --git a/contrib/dolphin/expected/network.out b/contrib/dolphin/expected/network.out index 5de44220f..36f950817 100644 --- a/contrib/dolphin/expected/network.out +++ b/contrib/dolphin/expected/network.out @@ -1,7 +1,5 @@ -drop database if exists test_network; -NOTICE: database "test_network" does not exist, skipping -create database test_network dbcompatibility 'b'; -\c test_network +create schema test_network; +set current_schema to 'test_network'; create table test (ip1 varchar(20),ip2 char(20),ip3 nvarchar2(20),ip4 text,ip5 clob); insert into test (ip1,ip2,ip3,ip4,ip5) values ('192.168.1.1','127.0.0.1','10.0.0.10','172.0.0.1','0.0.0.0'),('fe80::1','a::f','a::c','a::d','a::e'),('192.168.1.256','192.168.1','256.168.1.1','192.256.1.1','192.168.1.-1'); select is_ipv4(ip1),is_ipv4(ip2),is_ipv4(ip3),is_ipv4(ip4),is_ipv4(ip5) from test; @@ -140,5 +138,6 @@ select is_ipv6(NULL); 0 (1 row) -\c postgres -drop database if exists test_network; +drop schema test_network cascade; +NOTICE: drop cascades to table test +reset current_schema; diff --git a/contrib/dolphin/expected/network2.out b/contrib/dolphin/expected/network2.out index cf61f204d..61390761e 100644 --- a/contrib/dolphin/expected/network2.out +++ b/contrib/dolphin/expected/network2.out @@ -1,7 +1,5 @@ -drop database if exists network2; -NOTICE: database "network2" does not exist, skipping -create database network2 dbcompatibility 'b'; -\c network2 +create schema network2; +set current_schema to 'network2'; set dolphin.sql_mode = ''; select inet_ntoa(inet_aton('255.255.255.255.255.255.255.255')); inet_ntoa @@ -850,5 +848,5 @@ SELECT IS_IPV4_MAPPED(NULL),IS_IPV4_COMPAT(NULL); (1 row) reset 
dolphin.sql_mode; -\c postgres -drop database if exists network2; +drop schema network2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/none_strict_warning.out b/contrib/dolphin/expected/none_strict_warning.out index 4547e51cd..75db18606 100644 --- a/contrib/dolphin/expected/none_strict_warning.out +++ b/contrib/dolphin/expected/none_strict_warning.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists none_strict_warning_test; -NOTICE: database "none_strict_warning_test" does not exist, skipping -create database none_strict_warning_test dbcompatibility 'b'; -\c none_strict_warning_test +create schema none_strict_warning_test; +set current_schema to 'none_strict_warning_test'; reset dolphin.sql_mode; create table test_int1(c1 int1); create table test_int8(c1 int8); @@ -2571,6 +2568,14 @@ select * from test_uint8; 0 (26 rows) ----- drop database -\c contrib_regression -DROP DATABASE none_strict_warning_test; +drop schema none_strict_warning_test cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades to table test_int1 +drop cascades to table test_int8 +drop cascades to table test_int4 +drop cascades to table test_int2 +drop cascades to table test_uint1 +drop cascades to table test_uint2 +drop cascades to table test_uint4 +drop cascades to table test_uint8 +reset current_schema; diff --git a/contrib/dolphin/expected/nvarchar.out b/contrib/dolphin/expected/nvarchar.out index ee026d565..850163d3b 100644 --- a/contrib/dolphin/expected/nvarchar.out +++ b/contrib/dolphin/expected/nvarchar.out @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists db_nvarchar; -NOTICE: database "db_nvarchar" does not exist, skipping -create database db_nvarchar dbcompatibility 'b'; -\c db_nvarchar +create schema db_nvarchar; +set current_schema to 'db_nvarchar'; -- -- VARCHAR -- @@ -116,5 +113,6 @@ SELECT '' AS four, * FROM NVARCHAR_TBL; | abcd (4 rows) -\c postgres -drop database if exists db_nvarchar; +drop schema 
db_nvarchar cascade; +NOTICE: drop cascades to table nvarchar_tbl +reset current_schema; diff --git a/contrib/dolphin/expected/oct.out b/contrib/dolphin/expected/oct.out index 032857dd1..caa7a6503 100644 --- a/contrib/dolphin/expected/oct.out +++ b/contrib/dolphin/expected/oct.out @@ -1,7 +1,5 @@ -drop database if exists db_oct; -NOTICE: database "db_oct" does not exist, skipping -create database db_oct dbcompatibility 'b'; -\c db_oct +create schema db_oct; +set current_schema to 'db_oct'; -- 测试正常数字十进制转八进制 SELECT OCT(10); oct @@ -116,5 +114,5 @@ select oct(name) from test_oct; (2 rows) drop table if exists test_oct; -\c postgres -drop database if exists db_oct; +drop schema db_oct cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/option.out b/contrib/dolphin/expected/option.out index 03ff272ce..bc4b424d6 100644 --- a/contrib/dolphin/expected/option.out +++ b/contrib/dolphin/expected/option.out @@ -1,7 +1,5 @@ -drop database if exists option; -NOTICE: database "option" does not exist, skipping -create database option dbcompatibility = 'b'; -\c option +create schema option; +set current_schema to 'option'; create global temp table test1(a int primary key, b text) on commit delete rows engine = InnoDB with(STORAGE_TYPE = ASTORE); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "test1_pkey" for table "test1" create global temp table test2(id int,vname varchar(48),remark text) engine = InnoDB on commit PRESERVE rows ; @@ -123,5 +121,8 @@ drop table test7; drop table test8; drop table test9; drop table test10; -\c postgres -drop database option; +drop schema option cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table test11 +drop cascades to table test13 +reset current_schema; diff --git a/contrib/dolphin/expected/partition_maxvalue_test.out b/contrib/dolphin/expected/partition_maxvalue_test.out index a94f291ac..dbb5a2832 100644 --- a/contrib/dolphin/expected/partition_maxvalue_test.out +++ 
b/contrib/dolphin/expected/partition_maxvalue_test.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_maxvalue_test; -NOTICE: database "partition_maxvalue_test" does not exist, skipping -CREATE DATABASE partition_maxvalue_test dbcompatibility 'B'; -\c partition_maxvalue_test; +create schema partition_maxvalue_test; +set current_schema to 'partition_maxvalue_test'; --test MAXVALUE syntax CREATE TABLE IF NOT EXISTS testsubpart ( @@ -84,5 +82,9 @@ DETAIL: The distributed capability is not supported currently. create table testpart5(a int) DISTRIBUTE by range(a) (SLICE p0 start MAXVALUE end (200), SLICE p1 end(300)); ERROR: Un-support feature DETAIL: The distributed capability is not supported currently. -\c postgres; -drop DATABASE if exists partition_maxvalue_test; +drop schema partition_maxvalue_test cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table testsubpart +drop cascades to table testpart +drop cascades to table testpart1 +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test1.out b/contrib/dolphin/expected/partition_test1.out index 353645f4a..69ae2cd04 100644 --- a/contrib/dolphin/expected/partition_test1.out +++ b/contrib/dolphin/expected/partition_test1.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test1; -NOTICE: database "partition_test1" does not exist, skipping -CREATE DATABASE partition_test1 dbcompatibility 'B'; -\c partition_test1; +create schema partition_test1; +set current_schema to 'partition_test1'; -------test range partition tables ----test partition table CREATE TABLE IF NOT EXISTS test_part @@ -714,7 +712,7 @@ NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "uidx_d" for table alter table test_part_list add constraint uidx_c unique using index idx_c; NOTICE: ALTER TABLE / ADD CONSTRAINT USING INDEX will rename index "idx_c" to "uidx_c" insert into test_part_list values(2000,1,2,3),(3000,2,3,4),(4000,3,4,5),(5000,4,5,6); -select * from test_part_list; +select * from 
test_part_list order by a desc; a | b | c | d ------+---+---+--- 5000 | 4 | 5 | 6 @@ -735,7 +733,7 @@ ALTER TABLE test_part_list REBUILD PARTITION p1, p2; test_part_list (1 row) -select * from test_part_list; +select * from test_part_list order by a desc; a | b | c | d ------+---+---+--- 5000 | 4 | 5 | 6 @@ -758,7 +756,7 @@ ALTER TABLE test_part_list REBUILD PARTITION all; test_part_list (1 row) -select * from test_part_list; +select * from test_part_list order by a desc; a | b | c | d ------+---+---+--- 5000 | 4 | 5 | 6 @@ -1510,5 +1508,11 @@ select * from test_part_segment where ((980 < b and b < 1000) or (2180 < b and b alter table test_part_segment remove partitioning; ERROR: The segment table test_part_segment is not supported CONTEXT: referenced column: remove_partitioning -\c postgres; -drop DATABASE if exists partition_test1; +drop schema partition_test1 cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table test_part_list +drop cascades to table test_part_hash +drop cascades to table test_nopart +drop cascades to table test_part_ustore +drop cascades to table test_part_segment +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test2.out b/contrib/dolphin/expected/partition_test2.out index 19ac3fc01..ae6ec1c0c 100644 --- a/contrib/dolphin/expected/partition_test2.out +++ b/contrib/dolphin/expected/partition_test2.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test2; -NOTICE: database "partition_test2" does not exist, skipping -CREATE DATABASE partition_test2 dbcompatibility 'B'; -\c partition_test2; +create schema partition_test2; +set current_schema to 'partition_test2'; CREATE TABLE IF NOT EXISTS test_part1 ( a int, @@ -565,5 +563,14 @@ alter table test_part_hash analyze partition p0,p1; (1 row) alter table test_part_hash analyze partition all; -\c postgres; -drop DATABASE if exists partition_test2; +drop schema partition_test2 cascade; +NOTICE: drop cascades to 8 other objects +DETAIL: drop cascades 
to table test_part1 +drop cascades to table test_subpart +drop cascades to table test_no_part1 +drop cascades to table test_part_ustore +drop cascades to table test_no_part2 +drop cascades to table test_part_segment +drop cascades to table test_part_list +drop cascades to table test_part_hash +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test3.out b/contrib/dolphin/expected/partition_test3.out index 55f50a9d9..64945799b 100644 --- a/contrib/dolphin/expected/partition_test3.out +++ b/contrib/dolphin/expected/partition_test3.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test3; -NOTICE: database "partition_test3" does not exist, skipping -CREATE DATABASE partition_test3 dbcompatibility 'B'; -\c partition_test3; +create schema partition_test3; +set current_schema to 'partition_test3'; --test add and drop CREATE TABLE IF NOT EXISTS test_part2 ( @@ -213,5 +211,10 @@ LINE 1: ALTER TABLE test_part2_1 add PARTITION p2 VALUES (add(600,10... HINT: No function matches the given name and argument types. You might need to add explicit type casts. 
ALTER TABLE test_part2_1 add PARTITION p3 VALUES (DEFAULT) (SUBPARTITION p3_0 VALUES LESS THAN (100)); ERROR: can not add none-range partition to range partition table -\c postgres; -drop DATABASE if exists partition_test3; +drop schema partition_test3 cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table test_part2 +drop cascades to table test_subpart2 +drop cascades to table test_part2_1 +drop cascades to table test_subpart2_1 +reset current_schema; diff --git a/contrib/dolphin/expected/partition_test4.out b/contrib/dolphin/expected/partition_test4.out index ab7c79f1a..d190c9093 100644 --- a/contrib/dolphin/expected/partition_test4.out +++ b/contrib/dolphin/expected/partition_test4.out @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test4; -NOTICE: database "partition_test4" does not exist, skipping -CREATE DATABASE partition_test4 dbcompatibility 'B'; -\c partition_test4; +create schema partition_test4; +set current_schema to 'partition_test4'; CREATE TABLE test_range_subpart ( a INT4 PRIMARY KEY, @@ -70,7 +68,7 @@ alter table test_range_subpart reorganize partition p1,p2 into (partition m1 val select pg_get_tabledef('test_range_subpart'); pg_get_tabledef ------------------------------------------------------------------------------------------------------------------------ - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_range_subpart ( + a integer NOT NULL, + b integer + @@ -142,7 +140,7 @@ alter table test_range_part reorganize partition p1,p2 into (partition m1 values select pg_get_tabledef('test_range_part'); pg_get_tabledef ----------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_range_part ( + a integer NOT NULL, + b integer + @@ -201,7 +199,7 @@ alter table test_list_part reorganize partition p1,p2 into (partition m1 values( select 
pg_get_tabledef('test_list_part'); pg_get_tabledef --------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_list_part ( + a integer NOT NULL, + b integer + @@ -258,7 +256,7 @@ alter table test_list_subpart reorganize partition p1,p2 into (partition m1 valu select pg_get_tabledef('test_list_subpart'); pg_get_tabledef ---------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_list_subpart ( + a integer NOT NULL, + b integer + @@ -349,7 +347,7 @@ alter table test_part_ustore reorganize partition p1,p2 into (partition m1 value select pg_get_tabledef('test_part_ustore'); pg_get_tabledef ----------------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_part_ustore ( + a integer NOT NULL, + b integer + @@ -420,7 +418,7 @@ alter table test_part_segment reorganize partition p1,p2 into (partition m1 valu select pg_get_tabledef('test_part_segment'); pg_get_tabledef --------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE test_part_segment ( + a integer NOT NULL, + b integer + @@ -497,7 +495,7 @@ insert into b_range_hash_t01 values(1,2,3),(51,3,4); select pg_get_tabledef('b_range_hash_t01'); pg_get_tabledef --------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE b_range_hash_t01 ( + c1 integer NOT NULL, + c2 integer, + @@ -545,7 +543,7 @@ alter table b_range_hash_t01 reorganize partition p1 into 
(partition m1 values l select pg_get_tabledef('b_range_hash_t01'); pg_get_tabledef --------------------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = partition_test4; + CREATE TABLE b_range_hash_t01 ( + c1 integer NOT NULL, + c2 integer, + @@ -717,5 +715,19 @@ partition p2 values less than(300), partition p3 values less than (maxvalue) ); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "b_range_mt2_pkey" for table "b_range_mt2" -\c postgres; -drop DATABASE if exists partition_test4; +drop schema partition_test4 cascade; +NOTICE: drop cascades to 13 other objects +DETAIL: drop cascades to table test_range_subpart +drop cascades to table test_range_part +drop cascades to table test_list_part +drop cascades to table test_list_subpart +drop cascades to table test_no_part +drop cascades to table test_part_ustore +drop cascades to table test_part_segment +drop cascades to table b_range_hash_t01 +drop cascades to table b_range_hash_t05 +drop cascades to table b_interval_t1 +drop cascades to table b_range_range_t01 +drop cascades to table b_range_mt1 +drop cascades to table b_range_mt2 +reset current_schema; diff --git a/contrib/dolphin/expected/pl_debugger_client.out b/contrib/dolphin/expected/pl_debugger_client.out index 1297ab301..e9784afed 100644 --- a/contrib/dolphin/expected/pl_debugger_client.out +++ b/contrib/dolphin/expected/pl_debugger_client.out @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- wait for server establishment select pg_sleep(3); diff --git a/contrib/dolphin/expected/pl_debugger_server.out b/contrib/dolphin/expected/pl_debugger_server.out index 1f4746d6c..1355d2d35 100644 --- a/contrib/dolphin/expected/pl_debugger_server.out +++ b/contrib/dolphin/expected/pl_debugger_server.out @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set 
current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- setups drop schema if exists pl_debugger cascade; diff --git a/contrib/dolphin/expected/read_only_guc_test.out b/contrib/dolphin/expected/read_only_guc_test.out old mode 100755 new mode 100644 index 6c76369a3..e7fb2ca6d --- a/contrib/dolphin/expected/read_only_guc_test.out +++ b/contrib/dolphin/expected/read_only_guc_test.out @@ -1,9 +1,5 @@ --- b compatibility case -drop database if exists read_only_guc_test; -NOTICE: database "read_only_guc_test" does not exist, skipping --- create database read_only_guc_test dbcompatibility 'b'; -create database read_only_guc_test with DBCOMPATIBILITY = 'B'; -\c read_only_guc_test +create schema read_only_guc_test; +set current_schema to 'read_only_guc_test'; show version_comment; version_comment @@ -235,5 +231,5 @@ SELECT * FROM pg_settings WHERE NAME='wait_timeout'; (1 row) -\c postgres -drop database if exists read_only_guc_test; +drop schema read_only_guc_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/regexp.out b/contrib/dolphin/expected/regexp.out index 6824f001c..300f26305 100644 --- a/contrib/dolphin/expected/regexp.out +++ b/contrib/dolphin/expected/regexp.out @@ -1,12 +1,10 @@ -drop database if exists db_regexp; -NOTICE: database "db_regexp" does not exist, skipping -create database db_regexp dbcompatibility 'b'; -\c db_regexp +create schema db_regexp; +set current_schema to 'db_regexp'; select regexp('a', true); regexp -------- 1 (1 row) -\c postgres -drop database if exists db_regexp; +drop schema db_regexp cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/replace_test/replace.out b/contrib/dolphin/expected/replace_test/replace.out old mode 100755 new mode 100644 index 98e56aca3..cec863883 --- a/contrib/dolphin/expected/replace_test/replace.out +++ b/contrib/dolphin/expected/replace_test/replace.out @@ -1,7 +1,5 @@ -drop database if exists db_replace; 
-NOTICE: database "db_replace" does not exist, skipping -create database db_replace dbcompatibility 'B'; -\c db_replace; +create schema db_replace; +set current_schema to 'db_replace'; create table t1 (a int); create table t2 (a int); insert into t1 values(1); @@ -51,5 +49,12 @@ replace into Parts partition(p1) table T2; replace DELAYED into Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) select A from T2 where A >=2 ; -\c postgres -drop database db_replace; +drop schema db_replace cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table parts +drop cascades to table "T1" +drop cascades to table "T2" +drop cascades to table "Parts" +reset current_schema; diff --git a/contrib/dolphin/expected/second_microsecond.out b/contrib/dolphin/expected/second_microsecond.out index d865a97ac..4786adc83 100644 --- a/contrib/dolphin/expected/second_microsecond.out +++ b/contrib/dolphin/expected/second_microsecond.out @@ -1,5 +1,5 @@ -create database second_microsecond dbcompatibility = 'b'; -\c second_microsecond +create schema second_microsecond; +set current_schema to 'second_microsecond'; select microsecond(timestamp '2021-11-4 16:30:44.3411'); microsecond ------------- @@ -60,5 +60,5 @@ select second(timetz(6) '2021-11-4 16:30:44.3411'); 44 (1 row) -\c postgres -drop database second_microsecond; +drop schema second_microsecond cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/set_password.out b/contrib/dolphin/expected/set_password.out index 1e0352ec2..592c6d63d 100644 --- a/contrib/dolphin/expected/set_password.out +++ b/contrib/dolphin/expected/set_password.out @@ -1,7 +1,5 @@ -drop database if exists test_set_password; -NOTICE: database "test_set_password" does not exist, skipping -create database test_set_password dbcompatibility 'b'; -\c test_set_password +create schema test_set_password; +set 
current_schema to 'test_set_password'; set password = 'abc@1234'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. set password for current_user() = 'abc@2345'; @@ -17,6 +15,7 @@ set session authorization user1 password 'abc@3456'; set password for 'user1'@'%' = PASSWORD('abc@4567') replace 'abc@3456'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. set session authorization user1 password 'abc@4567'; -\c postgres -drop database if exists test_set_password; +\c contrib_regression drop user user1; +drop schema test_set_password cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show.out b/contrib/dolphin/expected/show.out index 26383f3ce..8340aeb7c 100644 --- a/contrib/dolphin/expected/show.out +++ b/contrib/dolphin/expected/show.out @@ -1,5 +1,5 @@ -create database show_test dbcompatibility 'b'; -\c show_test +create schema show_test; +set current_schema to 'show_test'; create user grant_test identified by 'H&*#^DH85@#(J'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. 
set search_path = 'grant_test'; @@ -270,5 +270,5 @@ drop cascades to function grant_test.tri_insert_func() drop cascades to function grant_test.tri_update_func() drop cascades to function grant_test.tri_delete_func() drop cascades to function grant_test.tri_truncate_func0010() -\c postgres -drop database show_test; \ No newline at end of file +drop schema show_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show_create.out b/contrib/dolphin/expected/show_create.out index 3511d099f..e3c55daa3 100644 --- a/contrib/dolphin/expected/show_create.out +++ b/contrib/dolphin/expected/show_create.out @@ -1,7 +1,5 @@ -drop database if exists show_create; -NOTICE: database "show_create" does not exist, skipping -create database show_create dbcompatibility 'b'; -\c show_create +create schema show_create; +set current_schema to 'show_create'; CREATE USER test_showcreate WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. 
GRANT ALL PRIVILEGES TO test_showcreate; @@ -1226,5 +1224,5 @@ reset current_schema; drop schema test_get_def cascade; RESET ROLE; DROP USER test_showcreate; -\c postgres -drop database if exists show_create; +drop schema show_create cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show_create_database.out b/contrib/dolphin/expected/show_create_database.out index 10daeea80..81262b8b3 100644 --- a/contrib/dolphin/expected/show_create_database.out +++ b/contrib/dolphin/expected/show_create_database.out @@ -1,7 +1,5 @@ -drop database if exists show_createdatabase; -NOTICE: database "show_createdatabase" does not exist, skipping -create database show_createdatabase dbcompatibility 'b'; -\c show_createdatabase +create schema show_createdatabase; +set current_schema to 'show_createdatabase'; CREATE USER test_showcreate_database WITH PASSWORD 'openGauss@123'; NOTICE: The iteration value of password is not recommended.Setting the iteration value too small reduces the security of the password, and setting it too large results in performance degradation. 
GRANT ALL PRIVILEGES TO test_showcreate_database; @@ -64,5 +62,5 @@ CONTEXT: referenced column: Create Database drop schema test_get_database cascade; RESET ROLE; DROP USER test_showcreate_database; -\c postgres -drop database if exists show_createdatabase; +drop schema show_createdatabase cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/show_variables.out b/contrib/dolphin/expected/show_variables.out index ba2a875f0..1551256b1 100644 --- a/contrib/dolphin/expected/show_variables.out +++ b/contrib/dolphin/expected/show_variables.out @@ -1,7 +1,5 @@ -drop database if exists show_variables; -NOTICE: database "show_variables" does not exist, skipping -create database show_variables dbcompatibility 'b'; -\c show_variables +create schema show_variables; +set current_schema to 'show_variables'; SET datestyle TO postgres, dmy; show variables like 'DateSty%'; Variable_name | Value @@ -71,5 +69,5 @@ show global variables where variable_name = 'DateStyle'; (1 row) RESET datestyle; -\c postgres -drop database if exists show_variables; +drop schema show_variables cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/signed_unsigned_cast.out b/contrib/dolphin/expected/signed_unsigned_cast.out index 3506472cc..492d3373e 100644 --- a/contrib/dolphin/expected/signed_unsigned_cast.out +++ b/contrib/dolphin/expected/signed_unsigned_cast.out @@ -1,7 +1,5 @@ -drop database if exists signed_unsigned_cast; -NOTICE: database "signed_unsigned_cast" does not exist, skipping -create database signed_unsigned_cast dbcompatibility 'b'; -\c signed_unsigned_cast +create schema signed_unsigned_cast; +set current_schema to 'signed_unsigned_cast'; select cast(1-2 as unsigned); uint8 ---------------------- @@ -26,5 +24,5 @@ select cast(cast(1 + 5 as unsigned) as signed); 6 (1 row) -\c postgres -drop database signed_unsigned_cast; +drop schema signed_unsigned_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/single_line_trigger.out 
b/contrib/dolphin/expected/single_line_trigger.out index f1fcc789f..9d30c3cea 100644 --- a/contrib/dolphin/expected/single_line_trigger.out +++ b/contrib/dolphin/expected/single_line_trigger.out @@ -1,9 +1,7 @@ --create trigger -- test mysql compatibility trigger -drop database if exists db_mysql; -NOTICE: database "db_mysql" does not exist, skipping -create database db_mysql dbcompatibility 'B'; -\c db_mysql +create schema db_mysql; +set current_schema to 'db_mysql'; create table t (id int); create table t1 (id int); create table animals (id int, name char(30)); @@ -309,5 +307,5 @@ NOTICE: trigger "ins_call100" already exists, skipping drop table t, t1; drop procedure proc; reset enable_set_variable_b_format; -\c postgres -drop database db_mysql; +drop schema db_mysql cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out b/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out index 8902df3d7..f69fff744 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_ascii_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_ascii_test; +NOTICE: database "db_b_ascii_test" does not exist, skipping +create database db_b_ascii_test dbcompatibility 'A'; +\c db_b_ascii_test SELECT ASCII('a'); ascii ------- @@ -10,10 +14,10 @@ SELECT ASCII('你'); 20320 (1 row) -drop database if exists db_b_ascii_test; -NOTICE: database "db_b_ascii_test" does not exist, skipping -create database db_b_ascii_test dbcompatibility 'B'; -\c db_b_ascii_test +\c contrib_regression +drop database db_b_ascii_test; +create schema db_b_ascii_test; +set current_schema to 'db_b_ascii_test'; SELECT ASCII('a'); ascii ------- @@ -26,5 +30,5 @@ SELECT ASCII('你'); 228 (1 row) -\c postgres -drop database db_b_ascii_test; +drop schema db_b_ascii_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out 
b/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out index 3e17f3639..2338575f2 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_from_base64_test.out @@ -1,7 +1,5 @@ -drop database if exists from_base64; -NOTICE: database "from_base64" does not exist, skipping -create database from_base64 dbcompatibility 'b'; -\c from_base64 +create schema from_base64; +set current_schema to 'from_base64'; --测试正常base64编码作为输入,返回base64编码的解码结果 SELECT FROM_BASE64('YWJj'); from_base64 @@ -82,5 +80,6 @@ SELECT FROM_BASE64(name) from test_base64; (3 rows) -\c postgres -drop database if exists from_base64; +drop schema from_base64 cascade; +NOTICE: drop cascades to table test_base64 +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_insert_test.out b/contrib/dolphin/expected/string_func_test/db_b_insert_test.out index 87dc88453..58cff70b2 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_insert_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_insert_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_insert_test; -NOTICE: database "db_b_insert_test" does not exist, skipping -create database db_b_insert_test dbcompatibility 'B'; -\c db_b_insert_test +create schema db_b_insert_test; +set current_schema to 'db_b_insert_test'; select insert('abcdefg', 2, 4, 'yyy'); insert -------- @@ -157,5 +155,5 @@ CONTEXT: referenced column: insert abcdefg (1 row) -\c postgres -drop database db_b_insert_test; +drop schema db_b_insert_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out b/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out index f14bba904..611694721 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_left_right_test.out @@ -1,3 +1,7 @@ +drop database if exists 
db_b_left_right_test; +NOTICE: database "db_b_left_right_test" does not exist, skipping +create database db_b_left_right_test dbcompatibility 'A'; +\c db_b_left_right_test SELECT left('abcdefg', 3); left ------ @@ -22,10 +26,10 @@ SELECT right('abcdefg', -3); defg (1 row) -drop database if exists db_b_left_right_test; -NOTICE: database "db_b_left_right_test" does not exist, skipping -create database db_b_left_right_test dbcompatibility 'B'; -\c db_b_left_right_test +\c contrib_regression +drop database db_b_left_right_test; +create schema db_b_left_right_test; +set current_schema to 'db_b_left_right_test'; set bytea_output to escape; SELECT left('abcdefg', 3); left @@ -303,5 +307,5 @@ select right('abc',5/2); abc (1 row) -\c postgres -drop database db_b_left_right_test; +drop schema db_b_left_right_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_ord_test.out b/contrib/dolphin/expected/string_func_test/db_b_ord_test.out index 493b9e614..141e8a6e2 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_ord_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_ord_test.out @@ -1,7 +1,5 @@ -drop database if exists db_b_ord_test; -NOTICE: database "db_b_ord_test" does not exist, skipping -create database db_b_ord_test dbcompatibility 'B'; -\c db_b_ord_test +create schema db_b_ord_test; +set current_schema to 'db_b_ord_test'; -- test 1 byte select ord('1111'); ord @@ -85,5 +83,6 @@ select ord(name) from test_ord; 4036199316 (3 rows) -\c postgres -drop database if exists db_b_ord_test; +drop schema db_b_ord_test cascade; +NOTICE: drop cascades to table test_ord +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_quote_test.out b/contrib/dolphin/expected/string_func_test/db_b_quote_test.out index 8f5674c5f..6bc339d7e 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_quote_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_quote_test.out @@ -1,7 +1,5 @@ -drop 
database if exists db_b_quote_test; -NOTICE: database "db_b_quote_test" does not exist, skipping -create database db_b_quote_test dbcompatibility 'B'; -\c db_b_quote_test +create schema db_b_quote_test; +set current_schema to 'db_b_quote_test'; SELECT QUOTE(E'Don\'t!'); quote ----------- @@ -20,5 +18,5 @@ SELECT QUOTE('O\hello'); E'O\\hello' (1 row) -\c postgres -drop database db_b_quote_test; +drop schema db_b_quote_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out b/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out index f385e34e0..9afceacb9 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_string_length_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_string_length_test; +NOTICE: database "db_b_string_length_test" does not exist, skipping +create database db_b_string_length_test dbcompatibility 'A'; +\c db_b_string_length_test SELECT length('jose'); length -------- @@ -22,10 +26,10 @@ SELECT length('你好呀jose'); 7 (1 row) -drop database if exists db_b_string_length_test; -NOTICE: database "db_b_string_length_test" does not exist, skipping -create database db_b_string_length_test dbcompatibility 'B'; -\c db_b_string_length_test +\c contrib_regression +drop database db_b_string_length_test; +create schema db_b_string_length_test; +set current_schema to 'db_b_string_length_test'; SELECT length('jose'); length -------- @@ -50,5 +54,5 @@ SELECT length('你好呀jose'); 13 (1 row) -\c postgres -drop database db_b_string_length_test; +drop schema db_b_string_length_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_substr_test.out b/contrib/dolphin/expected/string_func_test/db_b_substr_test.out index 3a3c96dab..e419278c8 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_substr_test.out +++ 
b/contrib/dolphin/expected/string_func_test/db_b_substr_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_substr_test; +NOTICE: database "db_b_substr_test" does not exist, skipping +create database db_b_substr_test dbcompatibility 'A'; +\c db_b_substr_test DROP TABLE IF EXISTS template_string; NOTICE: table "template_string" does not exist, skipping CREATE TABLE template_string(a TEXT, b BYTEA); @@ -55,10 +59,10 @@ FROM template_string; (1 row) DROP TABLE IF EXISTS template_string; -drop database if exists db_b_substr_test; -NOTICE: database "db_b_substr_test" does not exist, skipping -create database db_b_substr_test dbcompatibility 'B'; -\c db_b_substr_test +\c contrib_regression +drop database db_b_substr_test; +create schema db_b_substr_test; +set current_schema to 'db_b_substr_test'; set bytea_output to escape; DROP TABLE IF EXISTS template_string; NOTICE: table "template_string" does not exist, skipping @@ -567,5 +571,9 @@ select c1, c2, substr(c1 for c2) from test_row order by c1; abcdefg | -2 | (3 rows) -\c postgres -drop database db_b_substr_test; +drop schema db_b_substr_test cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table template_string +drop cascades to table test_column +drop cascades to table test_row +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out b/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out index ae3fe6681..b5dc2c084 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_to_base64_test.out @@ -1,7 +1,5 @@ -drop database if exists to_base64_test; -NOTICE: database "to_base64_test" does not exist, skipping -create database to_base64_test dbcompatibility 'b'; -\c to_base64_test +create schema to_base64_test; +set current_schema to 'to_base64_test'; --测试字符串作为输入,返回base64编码的编码结果 SELECT TO_BASE64('123456'); to_base64 @@ -124,5 +122,6 @@ SELECT TO_BASE64(name) from 
test_base64; dG9fYmFzZTY0 (2 rows) -\c postgres -drop database if exists to_base64_test; +drop schema to_base64_test cascade; +NOTICE: drop cascades to table test_base64 +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_trim_test.out b/contrib/dolphin/expected/string_func_test/db_b_trim_test.out index 88354f30a..a5714ccd9 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_trim_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_trim_test.out @@ -1,3 +1,7 @@ +drop database if exists db_b_trim_test; +NOTICE: database "db_b_trim_test" does not exist, skipping +create database db_b_trim_test dbcompatibility 'A'; +\c db_b_trim_test SELECT TRIM(' bar '); btrim ------- @@ -40,10 +44,10 @@ SELECT TRIM(TRAILING 'xyz' FROM 'xyzxbarxxyz'); xyzxbar (1 row) -drop database if exists db_b_trim_test; -NOTICE: database "db_b_trim_test" does not exist, skipping -create database db_b_trim_test dbcompatibility 'B'; -\c db_b_trim_test +\c contrib_regression +drop database db_b_trim_test; +create schema db_b_trim_test; +set current_schema to 'db_b_trim_test'; SELECT TRIM(' bar '); trim ------ @@ -141,5 +145,5 @@ SELECT TRIM(TRAILING ' X '::bytea); X (1 row) -\c postgres -drop database db_b_trim_test; +drop schema db_b_trim_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out b/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out index fdc006627..e138fbda0 100644 --- a/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out +++ b/contrib/dolphin/expected/string_func_test/db_b_unhex_test.out @@ -1,7 +1,5 @@ -drop database if exists unhex_test; -NOTICE: database "unhex_test" does not exist, skipping -create database unhex_test dbcompatibility 'b'; -\c unhex_test +create schema unhex_test; +set current_schema to 'unhex_test'; --测试字符串作为输入,返回十六进制编码的编码结果 SELECT UNHEX('6f70656e4761757373'); unhex @@ -101,5 +99,6 @@ SELECT UNHEX(name) from test_unhex; openGauss (2 rows) 
-\c postgres -drop database if exists unhex_test; +drop schema unhex_test cascade; +NOTICE: drop cascades to table test_unhex +reset current_schema; diff --git a/contrib/dolphin/expected/string_func_test/test_substring_index.out b/contrib/dolphin/expected/string_func_test/test_substring_index.out index 2edab4fdc..933598550 100644 --- a/contrib/dolphin/expected/string_func_test/test_substring_index.out +++ b/contrib/dolphin/expected/string_func_test/test_substring_index.out @@ -1,7 +1,5 @@ -drop database if exists test_substring_index; -NOTICE: database "test_substring_index" does not exist, skipping -create database test_substring_index dbcompatibility 'b'; -\c test_substring_index +create schema test_substring_index; +set current_schema to 'test_substring_index'; SELECT SUBSTRING_INDEX('www.opengauss.com','.',0); substring_index ----------------- @@ -245,5 +243,5 @@ SELECT SUBSTRING_INDEX(myDate,'-',1) FROM myTable; (1 row) drop table myTable; -\c postgres -drop database test_substring_index; +drop schema test_substring_index cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_alter_table.out b/contrib/dolphin/expected/test_alter_table.out index 0bd954708..b2a3a980f 100644 --- a/contrib/dolphin/expected/test_alter_table.out +++ b/contrib/dolphin/expected/test_alter_table.out @@ -1,7 +1,5 @@ -drop database if exists db_alter_table; -NOTICE: database "db_alter_table" does not exist, skipping -create database db_alter_table dbcompatibility 'b'; -\c db_alter_table +create schema db_alter_table; +set current_schema to 'db_alter_table'; create table alter_table_tbl1 (a int primary key, b int); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "alter_table_tbl1_pkey" for table "alter_table_tbl1" create table alter_table_tbl2 (c int primary key, d int); @@ -81,7 +79,7 @@ show dolphin.sql_mode; reset dolphin.sql_mode; \d+ table_ddl_0030_02 - Table "public.table_ddl_0030_02" + Table "db_alter_table.table_ddl_0030_02" Column | Type | 
Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- col1 | integer | | plain | | @@ -96,7 +94,7 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool); alter table test_primary add primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree; NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_primary_pkey" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -112,7 +110,7 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool); alter table test_primary add primary key (f11 desc, f12 asc) comment 'primary key' using btree; NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_primary_pkey" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -128,7 +126,7 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool); alter table test_primary add primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree using btree; NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_primary_pkey" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -144,7 +142,7 @@ create table test_unique(f31 int, f32 varchar(20)); alter table test_unique add unique using btree(f31) comment 
'unique index' using btree; NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_unique_f31_key" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -159,7 +157,7 @@ create table test_unique(f31 int, f32 varchar(20)); alter table test_unique add unique (f31) comment 'unique index' using btree; NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_unique_f31_key" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -174,7 +172,7 @@ create table test_unique(f31 int, f32 varchar(20)); alter table test_unique add unique using btree(f31) comment 'unique index' using btree using btree; NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_unique_f31_key" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -188,7 +186,7 @@ drop table test_unique; create table test_unique(f31 int, f32 varchar(20), constraint con_t_unique unique using btree(f31, f32) comment 'unique index' using btree); NOTICE: CREATE TABLE / UNIQUE will create implicit index "con_t_unique" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -202,7 
+200,7 @@ drop table test_unique; create table test_unique(f31 int, f32 varchar(20), constraint con_t_unique unique (f31, f32) comment 'unique index' using btree); NOTICE: CREATE TABLE / UNIQUE will create implicit index "con_t_unique" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -216,7 +214,7 @@ drop table test_unique; create table test_unique(f31 int, f32 varchar(20), constraint con_t_unique unique (f31, f32) comment 'unique index' using btree using btree); NOTICE: CREATE TABLE / UNIQUE will create implicit index "con_t_unique" for table "test_unique" \d+ test_unique - Table "public.test_unique" + Table "db_alter_table.test_unique" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f31 | integer | | plain | | @@ -230,7 +228,7 @@ drop table test_unique; create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_pri primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "con_t_pri" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -245,7 +243,7 @@ drop table test_primary; create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_pri primary key (f11 desc, f12 asc) comment 'primary key' using btree); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "con_t_pri" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table 
"db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -260,7 +258,7 @@ drop table test_primary; create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_pri primary key using btree(f11 desc, f12 asc) comment 'primary key' using btree using btree); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "con_t_pri" for table "test_primary" \d+ test_primary - Table "public.test_primary" + Table "db_alter_table.test_primary" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -272,5 +270,5 @@ Has OIDs: no Options: orientation=row, compression=no drop table test_primary; -\c postgres -drop database if exists db_alter_table; +drop schema db_alter_table cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_binary.out b/contrib/dolphin/expected/test_binary.out index 4896028c6..207e9943d 100644 --- a/contrib/dolphin/expected/test_binary.out +++ b/contrib/dolphin/expected/test_binary.out @@ -1,7 +1,5 @@ -drop database if exists test_binary; -NOTICE: database "test_binary" does not exist, skipping -create database test_binary dbcompatibility 'B'; -\c test_binary +create schema test_binary; +set current_schema to 'test_binary'; create table binary_templates (a bytea, b binary(5), c varbinary(5)); -- invalid typmod create table invalid_table (b binary(-1)); @@ -56,7 +54,7 @@ create index on test_index using btree (b); create index on test_index using hash (b); create index on test_index using gin (to_tsvector(b::text)); \d test_index - Table "public.test_index" + Table "test_binary.test_index" Column | Type | Modifiers --------+-----------------+----------- a | "binary"(10) | @@ -121,5 +119,6 @@ select * from t_varbinary_061; drop 
table if exists t_binary_061; drop table if exists t_varbinary_061; -\c postgres -drop database test_binary; +drop schema test_binary cascade; +NOTICE: drop cascades to table test_bytea +reset current_schema; diff --git a/contrib/dolphin/expected/test_bit_xor.out b/contrib/dolphin/expected/test_bit_xor.out index b065f0e90..6ea0befad 100644 --- a/contrib/dolphin/expected/test_bit_xor.out +++ b/contrib/dolphin/expected/test_bit_xor.out @@ -1,7 +1,5 @@ -drop database if exists test_bit_xor; -NOTICE: database "test_bit_xor" does not exist, skipping -create database test_bit_xor dbcompatibility 'B'; -\c test_bit_xor +create schema test_bit_xor; +set current_schema to 'test_bit_xor'; -- test datetime create table test_datetime (t datetime); select bit_xor(t) from test_datetime; @@ -765,5 +763,12 @@ select bit_xor(col) from test_varbit; (1 row) drop table test_varbit; -\c postgres -drop database test_bit_xor; +drop schema test_bit_xor cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table test_time +drop cascades to table test_time_with_zone +drop cascades to table test_time_with_null +drop cascades to table test_time_with_zone_with_null +drop cascades to table test_date +drop cascades to table test_bit +reset current_schema; diff --git a/contrib/dolphin/expected/test_blob.out b/contrib/dolphin/expected/test_blob.out index 7a680dfcb..a6eb644e7 100644 --- a/contrib/dolphin/expected/test_blob.out +++ b/contrib/dolphin/expected/test_blob.out @@ -1,7 +1,5 @@ -drop database if exists test_blob; -NOTICE: database "test_blob" does not exist, skipping -create database test_blob dbcompatibility 'B'; -\c test_blob +create schema test_blob; +set current_schema to 'test_blob'; create table test_template (t tinyblob, b blob, m mediumblob, l longblob); insert into test_template values('aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa'); create table test_tiny (t tinyblob); @@ -79,5 +77,5 @@ drop table test_tiny; drop table test_blob; drop table test_medium; 
drop table test_long; -\c postgres -drop database test_blob; +drop schema test_blob cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_condition.out b/contrib/dolphin/expected/test_condition.out index 6f0db9ac3..1ac8b8681 100644 --- a/contrib/dolphin/expected/test_condition.out +++ b/contrib/dolphin/expected/test_condition.out @@ -1,9 +1,5 @@ --- b compatibility case -drop database if exists db_test_condition; -NOTICE: database "db_test_condition" does not exist, skipping --- create database db_test_condition dbcompatibility 'b'; -create database db_test_condition with DBCOMPATIBILITY = 'B'; -\c db_test_condition +create schema db_test_condition; +set current_schema to 'db_test_condition'; set dolphin.sql_mode = ''; create table test_bccf (t1 int ,t2 float, t3 char, t4 text); insert into test_bccf values(1,3,null,null); @@ -6998,5 +6994,6 @@ select strcmp(blb, txt) from typeset; -1 (1 row) -\c postgres -drop database db_test_condition; +drop schema db_test_condition cascade; +NOTICE: drop cascades to table typeset +reset current_schema; diff --git a/contrib/dolphin/expected/test_current_user.out b/contrib/dolphin/expected/test_current_user.out index b485d2164..d0f6cc638 100644 --- a/contrib/dolphin/expected/test_current_user.out +++ b/contrib/dolphin/expected/test_current_user.out @@ -1,7 +1,5 @@ -drop database if exists test_current_user; -NOTICE: database "test_current_user" does not exist, skipping -create database test_current_user dbcompatibility 'b'; -\c test_current_user +create schema test_current_user; +set current_schema to 'test_current_user'; select current_user; current_user -------------- @@ -86,5 +84,5 @@ DROP USER MAPPING FOR USER SERVER s1; CREATE USER MAPPING FOR u1 SERVER s1; DROP USER MAPPING FOR u1 SERVER s1; drop user u1; -\c postgres -drop database test_current_user; +drop schema test_current_user cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_datatype.out 
b/contrib/dolphin/expected/test_datatype.out index bd569bf8b..1939f2d03 100644 --- a/contrib/dolphin/expected/test_datatype.out +++ b/contrib/dolphin/expected/test_datatype.out @@ -1,7 +1,5 @@ -drop database if exists b_datatype_test; -NOTICE: database "b_datatype_test" does not exist, skipping -create database b_datatype_test dbcompatibility 'B'; -\c b_datatype_test +create schema b_datatype_test; +set current_schema to 'b_datatype_test'; -- bit(n), when insert into bit, support the length less than n, which must be equal to n in normal case create table bit_test(a bit); create table bit_test2(a bit(5)); @@ -262,7 +260,7 @@ select b'11'::bit(33); --tinyint(n),smallint(n),mediumint,mediumint(n),int(n),bigint(n) create table all_int_test(a tinyint(9999999999), b smallint(9999999999), c mediumint, d mediumint(9999999999), e int(9999999999), f bigint(9999999999)); \d all_int_test - Table "public.all_int_test" +Table "b_datatype_test.all_int_test" Column | Type | Modifiers --------+----------+----------- a | tinyint | @@ -273,5 +271,5 @@ create table all_int_test(a tinyint(9999999999), b smallint(9999999999), c mediu f | bigint | drop table all_int_test; -\c postgres -drop database b_datatype_test; +drop schema b_datatype_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_fixed.out b/contrib/dolphin/expected/test_fixed.out index 22f456ee2..14d7655f9 100644 --- a/contrib/dolphin/expected/test_fixed.out +++ b/contrib/dolphin/expected/test_fixed.out @@ -1,16 +1,14 @@ -drop database if exists test_fixed; -NOTICE: database "test_fixed" does not exist, skipping -create database test_fixed dbcompatibility 'B'; -\c test_fixed +create schema test_fixed; +set current_schema to 'test_fixed'; DROP TABLE IF EXISTS fixed_test; NOTICE: table "fixed_test" does not exist, skipping CREATE TABLE fixed_test (a fixed(10, 5)); \d fixed_test - Table "public.fixed_test" + Table "test_fixed.fixed_test" Column | Type | Modifiers --------+---------------+----------- 
a | numeric(10,5) | DROP TABLE fixed_test; -\c postgres -drop database test_fixed; +drop schema test_fixed cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out b/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out index 23ad8e8c5..a75277ea2 100644 --- a/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out +++ b/contrib/dolphin/expected/test_float_double_real_double_precision_MD.out @@ -1,10 +1,8 @@ -drop database if exists float_double_real_double_precision_MD; -NOTICE: database "float_double_real_double_precision_md" does not exist, skipping -create database float_double_real_double_precision_MD dbcompatibility 'b'; -\c float_double_real_double_precision_MD; +create schema double_precision; +set current_schema to 'double_precision'; create table test(a float(20, 2), b double(20, 2), c real(20, 2), d double precision(20, 2)); \d test; - Table "public.test" + Table "double_precision.test" Column | Type | Modifiers --------+---------------+----------- a | numeric(20,2) | @@ -118,8 +116,15 @@ LINE 1: create table test3(a double precision(3.6, 1.6)); ^ create table test3(a double(3.6, 1.6)); \d test3; - Table "public.test3" + Table "double_precision.test3" Column | Type | Modifiers --------+--------------+----------- a | numeric(4,2) | +drop schema double_precision cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table test +drop cascades to table test1 +drop cascades to table test2 +drop cascades to table test3 +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_char.out b/contrib/dolphin/expected/test_mysql_char.out index 5fa3ac6a3..178e6f2ae 100644 --- a/contrib/dolphin/expected/test_mysql_char.out +++ b/contrib/dolphin/expected/test_mysql_char.out @@ -1,7 +1,5 @@ -drop database if exists test_char; -NOTICE: database "test_char" does not exist, skipping -create database test_char with dbcompatibility='B'; -\c 
test_char +create schema test_char; +set current_schema to 'test_char'; set dolphin.b_compatibility_mode=1; set dolphin.sql_mode = ''; ---create table @@ -559,5 +557,5 @@ select '0.0100abc' || null; t (1 row) -\c postgres -drop database test_char; +drop schema test_char cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_enum.out b/contrib/dolphin/expected/test_mysql_enum.out index d5b5465dc..692deb0ad 100644 --- a/contrib/dolphin/expected/test_mysql_enum.out +++ b/contrib/dolphin/expected/test_mysql_enum.out @@ -1,7 +1,5 @@ -drop database if exists test_enum; -NOTICE: database "test_enum" does not exist, skipping -CREATE DATABASE test_enum with dbcompatibility='B'; -\c test_enum +create schema test_enum; +set current_schema to 'test_enum'; -- create extension dolphin; show sql_compatibility; sql_compatibility @@ -113,10 +111,10 @@ CREATE TABLE testtttttttttttttttttttttttttttttttttt ( age INT, myjobbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb enum('x','y') ); -SELECT * FROM pg_type WHERE typname like '%anonymous_enum%'; - typname | typnamespace | typowner | typlen | typbyval | typtype | typcategory | typispreferred | typisdefined | typdelim | typrelid | typelem | typarray | typinput | typoutput | typreceive | typsend | typmodin | typmodout | typanalyze | typalign | typstorage | typnotnull | typbasetype | typtypmod | typndims | typcollation | typdefaultbin | typdefault | typacl ------------------------------------------------------------------+--------------+----------+--------+----------+---------+-------------+----------------+--------------+----------+----------+---------+----------+----------+-----------+------------+-----------+----------+-----------+------------+----------+------------+------------+-------------+-----------+----------+--------------+---------------+------------+-------- - testtttttttttttttttt_myjobbbbbbbbbbbbbbbb_2200_anonymous_enum_1 | 2200 | 10 | 4 | t | e | E | f | t | , | 0 | 0 | 0 | enum_in | enum_out | enum_recv | 
enum_send | - | - | - | i | p | f | 0 | -1 | 0 | 0 | | | +SELECT count(*) FROM pg_type WHERE typname like 'testtttttttttttttttt_myjobbbbbbbbbbbbbbb%_anonymous_enum_1'; + count +------- + 1 (1 row) drop table testtttttttttttttttttttttttttttttttttt; @@ -240,5 +238,6 @@ W_COUNTRY VARCHAR(20) , W_GMT_OFFSET DECIMAL(5,2) ); ERROR: anoymous enum type does not support foreign key -\c postgres -DROP DATABASE test_enum; +drop schema test_enum cascade; +--?.* +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_operator.out b/contrib/dolphin/expected/test_mysql_operator.out index ec62c1fd7..14cb64d1a 100644 --- a/contrib/dolphin/expected/test_mysql_operator.out +++ b/contrib/dolphin/expected/test_mysql_operator.out @@ -1,7 +1,5 @@ -drop database if exists test_op_and; -NOTICE: database "test_op_and" does not exist, skipping -CREATE DATABASE test_op_and with dbcompatibility='B'; -\c test_op_and +create schema test_op_and; +set current_schema to 'test_op_and'; set dolphin.b_compatibility_mode = 1; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group'; ---create table @@ -1076,12 +1074,10 @@ drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -\c postgres -drop database test_op_and; -drop database if exists test_op_xor; -NOTICE: database "test_op_xor" does not exist, skipping -CREATE DATABASE test_op_xor with dbcompatibility='B'; -\c test_op_xor +drop schema test_op_and cascade; +reset current_schema; +create schema test_op_xor; +set current_schema to 'test_op_xor'; set dolphin.b_compatibility_mode = 1; select null^1; ?column? 
@@ -1555,12 +1551,10 @@ drop table testforint2_p4; drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; -\c postgres -drop database test_op_xor; -drop database if exists like_test; -NOTICE: database "like_test" does not exist, skipping -create database like_test DBCOMPATIBILITY 'b'; -\c like_test +drop schema test_op_xor cascade; +reset current_schema; +create schema like_test; +set current_schema to 'like_test'; set dolphin.b_compatibility_mode = 1; select 'a' like 'A'; ?column? @@ -2523,5 +2517,5 @@ select 10!; 3628800 (1 row) -\c postgres -drop database if exists like_test; +drop schema like_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_mysql_prepare.out b/contrib/dolphin/expected/test_mysql_prepare.out index 4251ab9a1..d31f14a10 100644 --- a/contrib/dolphin/expected/test_mysql_prepare.out +++ b/contrib/dolphin/expected/test_mysql_prepare.out @@ -1,7 +1,5 @@ -drop database if exists test_mysql_prepare; -NOTICE: database "test_mysql_prepare" does not exist, skipping -create database test_mysql_prepare dbcompatibility 'b'; -\c test_mysql_prepare +create schema test_mysql_prepare; +set current_schema to 'test_mysql_prepare'; create table test(name text, age int); insert into test values('a',18); prepare s1 as select * from test; @@ -95,11 +93,11 @@ explain (costs off, verbose on) execute p1; Nested Loop Output: prepare_table_01.a, prepare_table_01.b, prepare_table_02.a, prepare_table_02.b Join Filter: (prepare_table_01.a = prepare_table_02.a) - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a, prepare_table_01.b -> Materialize Output: prepare_table_02.a, prepare_table_02.b - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a, prepare_table_02.b (9 rows) @@ -123,12 +121,12 @@ explain (costs off, verbose on) execute p2(1); Nested Loop Output: prepare_table_01.a, 
prepare_table_01.b, prepare_table_02.a, prepare_table_02.b Join Filter: (prepare_table_01.a = prepare_table_02.a) - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a, prepare_table_01.b Filter: (prepare_table_01.b = ($1)::numeric) -> Materialize Output: prepare_table_02.a, prepare_table_02.b - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a, prepare_table_02.b (10 rows) @@ -188,8 +186,8 @@ ERROR: wrong number of parameters for prepared statement "p2" DETAIL: Expected 1 parameters but got 2. prepare p5 as 'select a from prepare_table_01 INTERSECT select a from prepare_table_02 order by 1 limit 1'; explain (costs off, verbose on) execute p5; - QUERY PLAN -------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------- Limit Output: "*SELECT* 1".a, (0) -> Sort @@ -200,11 +198,11 @@ explain (costs off, verbose on) execute p5; -> Append -> Subquery Scan on "*SELECT* 1" Output: "*SELECT* 1".a, 0 - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a -> Subquery Scan on "*SELECT* 2" Output: "*SELECT* 2".a, 1 - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a (16 rows) @@ -222,11 +220,11 @@ explain (costs off, verbose on) execute p6; Output: prepare_table_01.a, prepare_table_01.b Group By Key: prepare_table_01.a, prepare_table_01.b -> Append - -> Seq Scan on public.prepare_table_01 + -> Seq Scan on test_mysql_prepare.prepare_table_01 Output: prepare_table_01.a, prepare_table_01.b -> Subquery Scan on "*SELECT* 2" Output: "*SELECT* 2".a, "*SELECT* 2".b - -> Seq Scan on public.prepare_table_02 + -> Seq Scan on test_mysql_prepare.prepare_table_02 Output: prepare_table_02.a, prepare_table_02.b (10 rows) @@ 
-328,16 +326,16 @@ insert into t1_xc_fqs values (1,1,1), (2,2,2), (3,3,3), (4,4,4), (5,5,5); explain (costs off, verbose on) execute s using 1,@a; QUERY PLAN ----------------------------------------------------------- - Seq Scan on public.t1_xc_fqs + Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, num Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = $2)) (3 rows) explain (costs off, verbose on) execute i using 6,6,6; - QUERY PLAN ----------------------------- + QUERY PLAN +---------------------------------------- [Bypass] - Insert on public.t1_xc_fqs + Insert on test_mysql_prepare.t1_xc_fqs -> Result Output: $1, $2, $3 (4 rows) @@ -345,8 +343,8 @@ explain (costs off, verbose on) execute i using 6,6,6; explain (costs off, verbose on) execute u using 2,@b; QUERY PLAN ----------------------------------------------------------------- - Update on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Update on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, 0, ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = $2)) (4 rows) @@ -354,8 +352,8 @@ explain (costs off, verbose on) execute u using 2,@b; explain (costs off, verbose on) execute d using @c,@c; QUERY PLAN ----------------------------------------------------------------- - Delete on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Delete on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = $2)) (4 rows) @@ -414,16 +412,16 @@ insert into t1_xc_fqs values (1,1,1), (2,2,2), (3,3,3), (4,4,4), (5,5,5); explain (costs off, verbose on) execute s using @a; QUERY PLAN ---------------------------------------------------------- - Seq Scan on public.t1_xc_fqs + Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, num Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = 2)) (3 rows) explain (costs off, verbose on) execute i using 6; - QUERY PLAN 
----------------------------- + QUERY PLAN +---------------------------------------- [Bypass] - Insert on public.t1_xc_fqs + Insert on test_mysql_prepare.t1_xc_fqs -> Result Output: $1, 2, 3 (4 rows) @@ -431,8 +429,8 @@ explain (costs off, verbose on) execute i using 6; explain (costs off, verbose on) execute u using 2; QUERY PLAN ---------------------------------------------------------------- - Update on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Update on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: id1, id2, 1, ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = 2)) (4 rows) @@ -440,8 +438,8 @@ explain (costs off, verbose on) execute u using 2; explain (costs off, verbose on) execute d using @c; QUERY PLAN ---------------------------------------------------------------- - Delete on public.t1_xc_fqs - -> Seq Scan on public.t1_xc_fqs + Delete on test_mysql_prepare.t1_xc_fqs + -> Seq Scan on test_mysql_prepare.t1_xc_fqs Output: ctid Filter: ((t1_xc_fqs.id1 = $1) AND (t1_xc_fqs.id2 = 2)) (4 rows) @@ -510,12 +508,12 @@ explain (costs off, verbose on) execute s0 using @a,@a; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) @@ -525,33 +523,33 @@ explain (costs off, verbose on) execute s0 using @a,3; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq 
Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) explain (costs off, verbose on) execute s1 using 2,@b,2; - QUERY PLAN ----------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $3) (16 rows) @@ -561,55 +559,55 @@ explain (costs off, verbose on) execute s2 using @c; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $1) (10 rows) explain (costs off, verbose on) execute s3 using 4; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Hash Join Output: t3.id11 Hash Cond: (t1.id1 = t2.id1) -> Nested Loop Output: t3.id11, t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) -> Hash Output: t2.id1 - 
-> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 (16 rows) explain (costs off, verbose on) execute s4 using 5; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop Output: t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 Filter: (t2.id1 = $1) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) (17 rows) @@ -619,10 +617,10 @@ explain (costs off, verbose on) execute s5 using 4,5; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: ((t2.id2 = $2) AND (t2.id1 = $1)) - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $2) (8 rows) @@ -691,12 +689,12 @@ explain (costs off, verbose on) execute s0 using @a,@a; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) @@ -706,33 +704,33 @@ explain (costs off, verbose on) execute s0 using @a,@c; 
---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) (10 rows) explain (costs off, verbose on) execute s1 using 2,@b,2; - QUERY PLAN ----------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $2) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $3) (16 rows) @@ -742,55 +740,55 @@ explain (costs off, verbose on) execute s2 using @c; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $1) (10 rows) explain (costs off, verbose on) execute s3 using 4; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Hash Join Output: t3.id11 Hash Cond: (t1.id1 = t2.id1) -> Nested Loop 
Output: t3.id11, t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) -> Hash Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 (16 rows) explain (costs off, verbose on) execute s4 using 5; - QUERY PLAN ---------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------- Nested Loop Output: t3.id11 -> Nested Loop Output: t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $1) -> Materialize Output: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1 Filter: (t2.id1 = $1) -> Materialize Output: t3.id11 - -> Seq Scan on public.t3_xc_fqs t3 + -> Seq Scan on test_mysql_prepare.t3_xc_fqs t3 Output: t3.id11 Filter: (t3.id11 = $1) (17 rows) @@ -800,10 +798,10 @@ explain (costs off, verbose on) execute s5 using 4,5; ---------------------------------------------------------- Nested Loop Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: ((t2.id2 = $2) AND (t2.id1 = $1)) - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num Filter: (t1.id1 = $2) (8 rows) @@ -872,12 +870,12 @@ explain (costs off, verbose on) execute s0 using @a; -> Sort Output: t1.id1, t1.id2, t1.num Sort Key: t1.id1 - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Sort Output: t2.id1, t2.id2, t2.num Sort Key: t2.id1 - -> Seq Scan on public.t2_xc_fqs t2 + -> 
Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num Filter: (t2.id1 = $1) (14 rows) @@ -892,11 +890,11 @@ explain (costs off, verbose on) execute s1 using 1; Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num, t2.id1 Hash Cond: (t2.id1 = t1.id1) Join Filter: (t2.id1 = $1) - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num -> Hash Output: t1.id1, t1.id2, t1.num - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num (13 rows) @@ -910,11 +908,11 @@ explain (costs off, verbose on) execute s2 using 1; Output: t1.id1, t1.id2, t1.num, t2.id1, t2.id2, t2.num, t1.id1, t2.id1 Hash Cond: (t1.id1 = t2.id1) Join Filter: (t2.id1 = $1) - -> Seq Scan on public.t1_xc_fqs t1 + -> Seq Scan on test_mysql_prepare.t1_xc_fqs t1 Output: t1.id1, t1.id2, t1.num -> Hash Output: t2.id1, t2.id2, t2.num - -> Seq Scan on public.t2_xc_fqs t2 + -> Seq Scan on test_mysql_prepare.t2_xc_fqs t2 Output: t2.id1, t2.id2, t2.num (13 rows) @@ -957,5 +955,9 @@ deallocate s1; deallocate s2; reset dolphin.b_compatibility_mode; reset enable_set_variable_b_format; -\c postgres -drop database test_mysql_prepare; +drop schema test_mysql_prepare cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table t1_xc_fqs +drop cascades to table t2_xc_fqs +drop cascades to table t3_xc_fqs +reset current_schema; diff --git a/contrib/dolphin/expected/test_op_blob.out b/contrib/dolphin/expected/test_op_blob.out index daee4e2a8..602e43e46 100644 --- a/contrib/dolphin/expected/test_op_blob.out +++ b/contrib/dolphin/expected/test_op_blob.out @@ -1,7 +1,5 @@ -drop database if exists test_op_blob; -NOTICE: database "test_op_blob" does not exist, skipping -create database test_op_blob dbcompatibility 'b'; -\c test_op_blob +create schema test_op_blob; +set current_schema to 'test_op_blob'; select '1'::blob ^ '1'::blob; ?column? 
---------- @@ -81,5 +79,5 @@ select '1'::blob ^ 11::numeric; 10 (1 row) -\c postgres -drop database test_op_blob; +drop schema test_op_blob cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_op_xor_boolandfloat.out b/contrib/dolphin/expected/test_op_xor_boolandfloat.out index 09ac50204..c72f6447d 100644 --- a/contrib/dolphin/expected/test_op_xor_boolandfloat.out +++ b/contrib/dolphin/expected/test_op_xor_boolandfloat.out @@ -1,7 +1,5 @@ -drop database if exists test_bool_float; -NOTICE: database "test_bool_float" does not exist, skipping -create database test_bool_float dbcompatibility 'b'; -\c test_bool_float +create schema test_bool_float; +set current_schema to 'test_bool_float'; set dolphin.b_compatibility_mode = true; select 1::bool ^ 2::int1; ?column? @@ -862,5 +860,5 @@ select (-1)::numeric ^ (-2)::numeric; (1 row) set dolphin.b_compatibility_mode = false; -\c postgres -drop database test_bool_float; +drop schema test_bool_float cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_op_xor_unsignedint.out b/contrib/dolphin/expected/test_op_xor_unsignedint.out index 3fb73844a..229697f0c 100644 --- a/contrib/dolphin/expected/test_op_xor_unsignedint.out +++ b/contrib/dolphin/expected/test_op_xor_unsignedint.out @@ -1,7 +1,5 @@ -drop database if exists test_op_xor_unsignedint; -NOTICE: database "test_op_xor_unsignedint" does not exist, skipping -create database test_op_xor_unsignedint with dbcompatibility='B'; -\c test_op_xor_unsignedint +create schema test_op_xor_unsignedint; +set current_schema to 'test_op_xor_unsignedint'; select (-1)::uint1 ^ 2::int1; ?column? 
---------- @@ -740,5 +738,5 @@ select 2 ::uint8 ^ 2::varchar; 0 (1 row) -\c postgres -drop database test_op_xor_unsignedint; +drop schema test_op_xor_unsignedint cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_optimize.out b/contrib/dolphin/expected/test_optimize.out index da70b5064..2bca4797b 100644 --- a/contrib/dolphin/expected/test_optimize.out +++ b/contrib/dolphin/expected/test_optimize.out @@ -1,7 +1,5 @@ -drop database if exists db_optimize; -NOTICE: database "db_optimize" does not exist, skipping -create database db_optimize dbcompatibility 'b'; -\c db_optimize +create schema db_optimize; +set current_schema to 'db_optimize'; create table doc(id serial primary key, content varchar(255)); NOTICE: CREATE TABLE will create implicit sequence "doc_id_seq" for serial column "doc.id" NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "doc_pkey" for table "doc" @@ -12,5 +10,5 @@ drop table doc; set xc_maintenance_mode = on; optimize table pg_class; set xc_maintenance_mode = off; -\c postgres -drop database if exists db_optimize; +drop schema db_optimize cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_schema.out b/contrib/dolphin/expected/test_schema.out index de83392ff..4815344de 100644 --- a/contrib/dolphin/expected/test_schema.out +++ b/contrib/dolphin/expected/test_schema.out @@ -1,12 +1,10 @@ -drop database if exists schema_test; -NOTICE: database "schema_test" does not exist, skipping -create database schema_test dbcompatibility 'b'; -\c schema_test +create schema schema_test; +set current_schema to 'schema_test'; SELECT SCHEMA(); - schema --------- - public + schema +------------- + schema_test (1 row) -\c postgres -drop database if exists schema_test; +drop schema schema_test cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_set_charset.out b/contrib/dolphin/expected/test_set_charset.out index c92f9957d..b7f15c87f 100644 --- 
a/contrib/dolphin/expected/test_set_charset.out +++ b/contrib/dolphin/expected/test_set_charset.out @@ -1,7 +1,5 @@ -drop database if exists db_charset; -NOTICE: database "db_charset" does not exist, skipping -create database db_charset dbcompatibility 'b'; -\c db_charset +create schema db_charset; +set current_schema to 'db_charset'; show client_encoding; client_encoding ----------------- @@ -64,5 +62,5 @@ show client_encoding; UTF8 (1 row) -\c postgres -drop database if exists db_charset; +drop schema db_charset cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows.out b/contrib/dolphin/expected/test_shows.out index b1b59c76e..acc5af398 100644 --- a/contrib/dolphin/expected/test_shows.out +++ b/contrib/dolphin/expected/test_shows.out @@ -1,7 +1,5 @@ -drop database if exists db_show; -NOTICE: database "db_show" does not exist, skipping -create database db_show dbcompatibility 'b'; -\c db_show +create schema db_show; +set current_schema to 'db_show'; show processlist; --? 
Id | Pid | QueryId | UniqueSqlId | User | Host | db | .* | BackendStart | XactStart | .* | State | .* --?.* @@ -28,5 +26,5 @@ show full processlist; --?.* (8 rows) -\c postgres -drop database if exists db_show; +drop schema db_show cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows_3.out b/contrib/dolphin/expected/test_shows_3.out index 5f6941226..6014cce6b 100644 --- a/contrib/dolphin/expected/test_shows_3.out +++ b/contrib/dolphin/expected/test_shows_3.out @@ -1,7 +1,5 @@ -drop database if exists db_show_3; -NOTICE: database "db_show_3" does not exist, skipping -create database db_show_3 dbcompatibility 'b'; -\c db_show_3 +create schema db_show_3; +set current_schema to 'db_show_3'; show databases; Database -------------------- @@ -9,6 +7,8 @@ show databases; blockchain cstore db4ai + db_b_new_gram_test + db_show_3 dbe_perf dbe_pldebugger dbe_pldeveloper @@ -17,9 +17,10 @@ show databases; pg_toast pkg_service public + sc snapshot sqladvisor -(14 rows) +(17 rows) create schema aa1; create schema aa2; @@ -89,7 +90,8 @@ show databases; u2 (9 rows) -\c postgres -drop database if exists db_show_3; +\c contrib_regression drop user u1; drop user u2; +drop schema db_show_3 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows_4.out b/contrib/dolphin/expected/test_shows_4.out index 11cde9404..e8162331f 100644 --- a/contrib/dolphin/expected/test_shows_4.out +++ b/contrib/dolphin/expected/test_shows_4.out @@ -1,7 +1,5 @@ -drop database if exists db_show_4; -NOTICE: database "db_show_4" does not exist, skipping -create database db_show_4 dbcompatibility 'b'; -\c db_show_4 +create schema db_show_4; +set current_schema to 'db_show_4'; show master status; Xlog_File_Name | Xlog_File_Offset | Xlog_Lsn --------------------------+------------------+----------- @@ -13,5 +11,5 @@ show slave hosts; 
-----+----------+---------+------------------+-------------+-----------------+-------------+---------------+-------+----------------------+-------------------------+-------------------------+--------------------------+---------------+------------ (0 rows) -\c postgres -drop database if exists db_show_4; +drop schema db_show_4 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_shows_5.out b/contrib/dolphin/expected/test_shows_5.out index 8ae176e49..caa48fbda 100644 --- a/contrib/dolphin/expected/test_shows_5.out +++ b/contrib/dolphin/expected/test_shows_5.out @@ -1,10 +1,8 @@ -DROP DATABASE IF EXISTS db_show_5; -NOTICE: database "db_show_5" does not exist, skipping -CREATE DATABASE db_show_5 DBCOMPATIBILITY 'b'; -\c db_show_5 +create schema db_show_5; +set current_schema to 'db_show_5'; CREATE SCHEMA tst_schema5; --orientation=row, normal primary key -CREATE TABLE public.t1 +CREATE TABLE db_show_5.t1 ( id int primary key, name varchar(20), @@ -12,7 +10,7 @@ phone text ) WITH(ORIENTATION=ROW, STORAGE_TYPE=USTORE); NOTICE: CREATE TABLE / PRIMARY KEY will create implicit index "t1_pkey" for table "t1" --orientation=column, serial primary key -CREATE TABLE public.t2 +CREATE TABLE db_show_5.t2 ( id serial primary key, name varchar(20), @@ -50,9 +48,7 @@ SHOW TABLE STATUS; --?.* --?.* --?.* ---?.* ---?.* -(4 rows) +(2 rows) SHOW TABLE STATUS FROM tst_schema5; --? 
Name | Engine | Version | Row_format | Rows | Avg_row_length | Data_length | Max_data_length | Index_length | Data_free | Auto_increment | Create_time | Update_time | Check_time | Collation | Checksum | Create_options | Comment @@ -119,5 +115,8 @@ RESET ROLE; REVOKE SELECT ON ALL TABLES IN SCHEMA tst_schema5 FROM tst_shows_u5; REVOKE SELECT ON ALL SEQUENCES IN SCHEMA tst_schema5 FROM tst_shows_u5; DROP USER tst_shows_u5; -\c postgres -DROP DATABASE IF EXISTS db_show_5; +drop schema db_show_5 cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +reset current_schema; diff --git a/contrib/dolphin/expected/test_system_user.out b/contrib/dolphin/expected/test_system_user.out index 9153494c1..5711d4a7b 100644 --- a/contrib/dolphin/expected/test_system_user.out +++ b/contrib/dolphin/expected/test_system_user.out @@ -1,7 +1,5 @@ -drop database if exists test_system_user; -NOTICE: database "test_system_user" does not exist, skipping -create database test_system_user dbcompatibility 'b'; -\c test_system_user +create schema test_system_user; +set current_schema to 'test_system_user'; select session_user; session_user -------------- @@ -32,5 +30,5 @@ select system_user(); --?.* (1 row) -\c postgres -drop database test_system_user; +drop schema test_system_user cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/test_table_index.out b/contrib/dolphin/expected/test_table_index.out index 4e8cf4587..f2741889f 100644 --- a/contrib/dolphin/expected/test_table_index.out +++ b/contrib/dolphin/expected/test_table_index.out @@ -1,8 +1,5 @@ ----- b compatibility case -drop database if exists test_table_index; -NOTICE: database "test_table_index" does not exist, skipping -create database test_table_index dbcompatibility 'b'; -\c test_table_index +create schema test_table_index; +set current_schema to 'test_table_index'; -- test crate normal table create table t1(f1 int , index(f1)); create table t2(f1 int , 
index idx_f2(f1)); @@ -10,7 +7,7 @@ create table t3(f1 int , index idx_f3 using btree (f1)); create table t4(f1 int , index idx_f4 using btree (f1 desc)); create table t5(f1 int , key idx_f5 using btree (f1 asc)); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -20,7 +17,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t2 - Table "public.t2" + Table "test_table_index.t2" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -30,7 +27,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t3 - Table "public.t3" + Table "test_table_index.t3" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -40,7 +37,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t4 - Table "public.t4" + Table "test_table_index.t4" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -50,7 +47,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ t5 - Table "public.t5" + Table "test_table_index.t5" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -68,7 +65,7 @@ create table test_expr1(f1 int, f2 int, f3 int, index tbl_idx1 using btree(f1 de create table test_expr2(f1 int, f2 int, f3 int, index tbl_idx2 using btree((abs(f1)) desc, f2 asc)); create table test_expr3(f1 int, f2 int, f3 int, index tbl_idx3 using btree((abs(f1)+10) desc, f2 asc)); \d+ test_expr1 - Table "public.test_expr1" + Table "test_table_index.test_expr1" Column | Type | 
Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -80,7 +77,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ test_expr2 - Table "public.test_expr2" + Table "test_table_index.test_expr2" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -92,7 +89,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ test_expr3 - Table "public.test_expr3" + Table "test_table_index.test_expr3" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -113,12 +110,12 @@ create table text_column_table(f11 int, f12 varchar(20), f13 bool, index (f11)) create table test_gist (t tsquery, s tsvector,index using gist(t)); create table test_gin (t tsquery, s tsvector,index using gin(s)); create table text_column_table(f1 int, index(f1)) with (orientation=column); -ERROR: relation "text_column_table" already exists in schema "public" +ERROR: relation "text_column_table" already exists in schema "test_table_index" DETAIL: creating new table with existing name in the same schema create table text_column_table_expr(f1 int, unique((f1+1))) with (orientation=column); ERROR: access method "cbtree" does not support index expressions \d+ test_ubtree - Table "public.test_ubtree" + Table "test_table_index.test_ubtree" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -130,7 +127,7 @@ Has OIDs: no Options: orientation=row, storage_type=ustore, compression=no \d+ test_gist - Table "public.test_gist" + Table "test_table_index.test_gist" Column | Type | Modifiers | Storage | Stats target | Description 
--------+----------+-----------+----------+--------------+------------- t | tsquery | | plain | | @@ -141,7 +138,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ test_gin - Table "public.test_gin" + Table "test_table_index.test_gin" Column | Type | Modifiers | Storage | Stats target | Description --------+----------+-----------+----------+--------------+------------- t | tsquery | | plain | | @@ -152,7 +149,7 @@ Has OIDs: no Options: orientation=row, compression=no \d+ text_column_table - Table "public.text_column_table" + Table "test_table_index.text_column_table" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | | plain | | @@ -185,7 +182,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_btree - Table "public.test_partition_btree" + Table "test_table_index.test_partition_btree" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -214,7 +211,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_index - Table "public.test_partition_index" + Table "test_table_index.test_partition_index" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -243,7 +240,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_func - Table "public.test_partition_func" + Table "test_table_index.test_partition_func" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -272,7 +269,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_expr - Table "public.test_partition_expr" + Table 
"test_table_index.test_partition_expr" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -301,7 +298,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_partition_column - Table "public.test_partition_column" + Table "test_table_index.test_partition_column" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -338,7 +335,7 @@ SUBPARTITION p_201902_b VALUES ( '2' ) ) ); \d+ test_subpartition_btree - Table "public.test_subpartition_btree" + Table "test_table_index.test_subpartition_btree" Column | Type | Modifiers | Storage | Stats target | Description ------------+-----------------------+-----------+----------+--------------+------------- month_code | character varying(30) | not null | extended | | @@ -376,7 +373,7 @@ SUBPARTITION p_201902_b VALUES ( '2' ) ) ); \d+ test_subpartition_btree - Table "public.test_subpartition_btree" + Table "test_table_index.test_subpartition_btree" Column | Type | Modifiers | Storage | Stats target | Description ------------+-----------------------+-----------+----------+--------------+------------- month_code | character varying(30) | not null | extended | | @@ -427,7 +424,7 @@ create table t1(a int , b int, index (a, b)); alter table t1 add index (a); alter table t1 add index idx_a_1(a); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -445,7 +442,7 @@ alter table t1 add index using btree(a), add index (b desc); alter table t1 add index idx_a_t1 using btree(a); alter table t1 add index idx_a_b_t1 using btree(a, b desc); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | 
Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -464,7 +461,7 @@ create table t1(a int , b int, index (a, b)); alter table t1 add key idx_a_b_t1 using btree(a, b desc); alter table t1 add key idx_a_b_expr_t1 using btree((abs(a+b) + a) desc); \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -486,7 +483,7 @@ ERROR: syntax error at or near "idx1" LINE 1: alter table t1 add index using btree idx1 (a); ^ \d+ t1 - Table "public.t1" + Table "test_table_index.t1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -507,7 +504,7 @@ alter table test_normal_index add key(f11 desc, f12 asc); alter table test_normal_index add key using btree(f11, f12),add unique(f11, f12),add primary key(f11, f12); NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_normal_index_pkey" for table "test_normal_index" \d+ test_normal_index - Table "public.test_normal_index" + Table "test_table_index.test_normal_index" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | not null | plain | | @@ -536,7 +533,7 @@ alter table text_column_table add key (f11,f12); alter table text_column_table add key using btree(f11); alter table text_column_table add key using cbtree(f11); \d+ text_column_table - Table "public.text_column_table" + Table "test_table_index.text_column_table" Column | Type | Modifiers | Storage | Stats target | Description --------+-----------------------+-----------+----------+--------------+------------- f11 | integer | | plain | | @@ -575,7 +572,7 @@ alter table test_partition_btree add index (f1 
desc); alter table test_partition_btree add key using btree(f1 desc, f2 asc, f3); alter table test_partition_btree add key using btree((abs(f1)) desc, (f2 * 2 + 1) asc, f3); \d+ test_partition_btree - Table "public.test_partition_btree" + Table "test_table_index.test_partition_btree" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -606,7 +603,7 @@ drop table if exists test_temporary_index1; create table test_option1(a int, b int, index idx_op1 using btree(a) comment 'yy'); alter table test_option1 add key ixd_at1 (b) comment 'aa'; \d+ test_option1 - Table "public.test_option1" + Table "test_table_index.test_option1" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -650,7 +647,7 @@ alter table test_option2 add index ixd_at2 using hash(b) comment 'aa' comment 'b create table test_option3(a int, b int, index idx_op3 (a) using btree); alter table test_option3 add index ixd_at3(b) using btree; \d+ test_option3 - Table "public.test_option3" + Table "test_table_index.test_option3" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -666,7 +663,7 @@ alter table test_option4 add index ixd_at4 using hash (b) using btree using hash NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_option4_pkey" for table "test_option4" NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_option4_a_key" for table "test_option4" \d+ test_option4 - Table "public.test_option4" + Table "test_table_index.test_option4" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -685,7 +682,7 @@ alter table test_option5 add 
index ixd_at5 using hash (b) using btree comment 'y NOTICE: ALTER TABLE / ADD PRIMARY KEY will create implicit index "test_option5_pkey" for table "test_option5" NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_option5_a_key" for table "test_option5" \d+ test_option5 - Table "public.test_option5" + Table "test_table_index.test_option5" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -715,7 +712,7 @@ Options: orientation=row, compression=no create table test_option6(a int, b int, key idx_op6 using hash(a) using btree comment 'yy' using hash comment 'xx'); \d+ test_option6 - Table "public.test_option6" + Table "test_table_index.test_option6" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -734,7 +731,7 @@ Options: orientation=row, compression=no create table test_option7(a int, b int, index idx_op7 (a) using btree comment 'yy' using hash comment 'xx'); \d+ test_option7 - Table "public.test_option7" + Table "test_table_index.test_option7" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -747,7 +744,7 @@ Options: orientation=row, compression=no create table test_option8(a int, b int, c int, key idx_op8_a (a) using btree comment 'yy' using hash comment 'xx', index idx_op8_b (b) using btree comment 'yy' using hash comment 'xx'); alter table test_option8 add index ixd_at8_b(b) using btree comment 'yy' using hash comment 'xx', add index ixd_at8_c (c) using btree comment 'yy' using hash comment 'xx'; \d+ test_option8 - Table "public.test_option8" + Table "test_table_index.test_option8" Column | Type | Modifiers | Storage | Stats target | Description 
--------+---------+-----------+---------+--------------+------------- a | integer | | plain | | @@ -776,7 +773,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_option9 - Table "public.test_option9" + Table "test_table_index.test_option9" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -811,7 +808,7 @@ PARTITION BY RANGE(f1) PARTITION P4 VALUES LESS THAN(MAXVALUE) ); \d+ test_option10 - Table "public.test_option10" + Table "test_table_index.test_option10" Column | Type | Modifiers | Storage | Stats target | Description --------+---------+-----------+---------+--------------+------------- f1 | integer | | plain | | @@ -850,5 +847,17 @@ alter table test_option1 add key ixd_at13 using btree (b) using aaa using btree; ERROR: access method "aaa" does not exist alter table test_option1 add key ixd_at14 using btree (b) comment 'xx' using aaa using btree; ERROR: access method "aaa" does not exist -\c contrib_regression -DROP DATABASE test_table_index; +drop schema test_table_index cascade; +NOTICE: drop cascades to 11 other objects +DETAIL: drop cascades to table test_option1 +drop cascades to table test_option2 +drop cascades to table test_option3 +drop cascades to table test_option4 +drop cascades to table test_option5 +drop cascades to table test_option6 +drop cascades to table test_option7 +drop cascades to table test_option8 +drop cascades to table test_option9 +drop cascades to table test_option10 +drop cascades to table test_option11 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_agg.out b/contrib/dolphin/expected/tinyint_agg.out index 95b9c2103..5a949de4c 100644 --- a/contrib/dolphin/expected/tinyint_agg.out +++ b/contrib/dolphin/expected/tinyint_agg.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_agg; -NOTICE: database "tinyint_agg" does not exist, skipping -create database tinyint_agg 
dbcompatibility 'b'; -\c tinyint_agg +create schema tinyint_agg; +set current_schema to 'tinyint_agg'; create table u1(a int1, b int2); insert into u1 values(null, null),(127, 127),(0, 0),(-128, -128),(null, null); select avg(a), avg(b) from u1; @@ -110,54 +108,54 @@ insert into t1 select generate_series(1, 1000000); insert into smp_test select a % 128 from t1; set query_dop = 2; explain(costs off, verbose) select avg(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.avg((avg(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (avg(a)) -> Aggregate Output: avg(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select bit_and(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: bit_and((bit_and(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (bit_and(a)) -> Aggregate Output: bit_and(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select bit_or(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: bit_or((bit_or(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (bit_or(a)) -> Aggregate Output: bit_or(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select count(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: count((count(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (count(a)) -> Aggregate Output: count(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test 
Output: a (8 rows) @@ -179,33 +177,33 @@ explain(costs off, verbose) select count(distinct a) from smp_test; -> HashAggregate Output: a Group By Key: smp_test.a - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (17 rows) explain(costs off, verbose) select max(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: max((max(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (max(a)) -> Aggregate Output: max(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select min(a)from smp_test; - QUERY PLAN --------------------------------------------------- + QUERY PLAN +---------------------------------------------------- Aggregate Output: min((min((a)::double precision))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (min((a)::double precision)) -> Aggregate Output: min((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) @@ -218,20 +216,20 @@ explain(costs off, verbose) select stddev(a) from smp_test; Output: (stddev((a)::double precision)) -> Aggregate Output: stddev((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select stddev_pop(a) from smp_test; - QUERY PLAN --------------------------------------------------- + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.stddev_pop((stddev_pop(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (stddev_pop(a)) -> Aggregate Output: stddev_pop(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) @@ -244,59 +242,59 @@ explain(costs off, verbose) select stddev_samp(a) from smp_test; Output: (stddev_samp((a)::double precision)) -> Aggregate Output: stddev_samp((a)::double precision) 
- -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select sum(a)from smp_test; - QUERY PLAN --------------------------------------------------- + QUERY PLAN +---------------------------------------------------- Aggregate Output: sum((sum((a)::double precision))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (sum((a)::double precision)) -> Aggregate Output: sum((a)::double precision) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select var_pop(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.var_pop((var_pop(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (var_pop(a)) -> Aggregate Output: var_pop(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select var_samp(a) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.var_samp((var_samp(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (var_samp(a)) -> Aggregate Output: var_samp(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) explain(costs off, verbose) select variance(a)from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +---------------------------------------------------- Aggregate Output: pg_catalog.variance((variance(a))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (variance(a)) -> Aggregate Output: variance(a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (8 rows) @@ -305,7 +303,7 @@ explain(costs off, verbose) select listagg(a) within group(order by a) from smp_ -------------------------------------------------- Aggregate 
Output: listagg(a ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (4 rows) @@ -314,9 +312,13 @@ explain(costs off, verbose) select listagg(a, ',') within group(order by a) from ------------------------------------------------------------- Aggregate Output: listagg(a, ','::text ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on tinyint_agg.smp_test Output: a (4 rows) -\c postgres -drop database tinyint_agg; +drop schema tinyint_agg cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table u1 +drop cascades to table smp_test +drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_cast.out b/contrib/dolphin/expected/tinyint_cast.out index b5412455f..235cb4a8f 100644 --- a/contrib/dolphin/expected/tinyint_cast.out +++ b/contrib/dolphin/expected/tinyint_cast.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_cast; -NOTICE: database "tinyint_cast" does not exist, skipping -create database tinyint_cast dbcompatibility 'b'; -\c tinyint_cast +create schema tinyint_cast; +set current_schema to 'tinyint_cast'; create table t1(a int1); insert into t1 values(''); ERROR: invalid input syntax for integer: "" @@ -382,5 +380,6 @@ select '-128'::text::int1; -128 (1 row) -\c postgres -drop database tinyint_cast; +drop schema tinyint_cast cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_index.out b/contrib/dolphin/expected/tinyint_index.out index 428856674..6c384e623 100644 --- a/contrib/dolphin/expected/tinyint_index.out +++ b/contrib/dolphin/expected/tinyint_index.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_index; -NOTICE: database "tinyint_index" does not exist, skipping -create database tinyint_index dbcompatibility 'b'; -\c tinyint_index +create schema tinyint_index; +set current_schema to 'tinyint_index'; create table t1(a int1); insert into t1 select 
generate_series(-128, 127); insert into t1 select generate_series(-128, 127); @@ -18,7 +16,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -28,7 +26,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -36,9 +34,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +-------------------------------------- + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -48,7 +46,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -118,7 +116,7 @@ select * from t1 where a = 1::int8; explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::tinyint) AND (t1.a < '3'::tinyint)) -> Bitmap Index Scan on idx1 @@ -128,7 +126,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t1 where a > 1::int2 and 
a < 3::int2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > 1::smallint) AND (t1.a < 3::smallint)) -> Bitmap Index Scan on idx1 @@ -138,7 +136,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > 1) AND (t1.a < 3)) -> Bitmap Index Scan on idx1 @@ -148,7 +146,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t1 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a > 1::bigint) AND (t1.a < 3::bigint)) -> Bitmap Index Scan on idx1 @@ -218,7 +216,7 @@ select * from t1 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t1 where a >= -2::int1 and a <= -1::int1; QUERY PLAN --------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= '-2'::tinyint) AND (t1.a <= '-1'::tinyint)) -> Bitmap Index Scan on idx1 @@ -228,7 +226,7 @@ explain(costs off, verbose)select * from t1 where a >= -2::int1 and a <= -1::int explain(costs off, verbose)select * from t1 where a >= -2::int2 and a <= -1::int2; QUERY PLAN ----------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= (-2)::smallint) AND (t1.a <= (-1)::smallint)) -> Bitmap Index Scan on idx1 @@ -238,7 +236,7 @@ explain(costs off, 
verbose)select * from t1 where a >= -2::int2 and a <= -1::int explain(costs off, verbose)select * from t1 where a >= -2::int4 and a <= -1::int4; QUERY PLAN --------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= (-2)) AND (t1.a <= (-1))) -> Bitmap Index Scan on idx1 @@ -248,7 +246,7 @@ explain(costs off, verbose)select * from t1 where a >= -2::int4 and a <= -1::int explain(costs off, verbose)select * from t1 where a >= -2::int8 and a <= -1::int8; QUERY PLAN ------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: ((t1.a >= (-2)::bigint) AND (t1.a <= (-1)::bigint)) -> Bitmap Index Scan on idx1 @@ -361,7 +359,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -371,7 +369,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -379,9 +377,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +-------------------------------------- + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -391,7 +389,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; 
QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on tinyint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -470,7 +468,7 @@ explain(costs off, verbose)select * from t1 where a >= -1::int1 and a <= 0::int1 ---------------------------------------------------------------------- Row Adapter Output: a - -> CStore Scan on public.t1 + -> CStore Scan on tinyint_index.t1 Output: a Filter: ((t1.a >= '-1'::tinyint) AND (t1.a <= '0'::tinyint)) (5 rows) @@ -480,17 +478,17 @@ explain(costs off, verbose)select * from t1 where a >= -1::int2 and a <= 0::int2 -------------------------------------------------------------------------- Row Adapter Output: a - -> CStore Index Only Scan using idx1 on public.t1 + -> CStore Index Only Scan using idx1 on tinyint_index.t1 Output: a Index Cond: ((t1.a >= (-1)::smallint) AND (t1.a <= 0::smallint)) (5 rows) explain(costs off, verbose)select * from t1 where a >= -1::int4 and a <= 0::int4; - QUERY PLAN ------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------- Row Adapter Output: a - -> CStore Index Only Scan using idx1 on public.t1 + -> CStore Index Only Scan using idx1 on tinyint_index.t1 Output: a Index Cond: ((t1.a >= (-1)) AND (t1.a <= 0)) (5 rows) @@ -500,8 +498,13 @@ explain(costs off, verbose)select * from t1 where a >= -1::int8 and a <= 0::int8 ---------------------------------------------------------------------- Row Adapter Output: a - -> CStore Index Only Scan using idx1 on public.t1 + -> CStore Index Only Scan using idx1 on tinyint_index.t1 Output: a Index Cond: ((t1.a >= (-1)::bigint) AND (t1.a <= 0::bigint)) (5 rows) +drop schema tinyint_index cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_operator.out 
b/contrib/dolphin/expected/tinyint_operator.out index c0267aaa6..a84a75772 100644 --- a/contrib/dolphin/expected/tinyint_operator.out +++ b/contrib/dolphin/expected/tinyint_operator.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_operator; -NOTICE: database "tinyint_operator" does not exist, skipping -create database tinyint_operator dbcompatibility 'b'; -\c tinyint_operator +create schema tinyint_operator; +set current_schema to 'tinyint_operator'; select 1::int1 + 1::int1; ?column? ---------- @@ -258,5 +256,5 @@ select @127::int1; select @(-128)::int1; ERROR: tinyint out of range -\c postgres -drop database tinyint_operator; +drop schema tinyint_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_partition.out b/contrib/dolphin/expected/tinyint_partition.out index c4a56d9f1..f71fee764 100644 --- a/contrib/dolphin/expected/tinyint_partition.out +++ b/contrib/dolphin/expected/tinyint_partition.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_partition; -NOTICE: database "tinyint_partition" does not exist, skipping -create database tinyint_partition dbcompatibility 'b'; -\c tinyint_partition +create schema tinyint_partition; +set current_schema to 'tinyint_partition'; CREATE TABLE t1 ( col1 int1 NOT NULL, @@ -248,5 +246,15 @@ select * from start_end1; 1 (1 row) -\c postgres -drop database tinyint_partition; +drop schema tinyint_partition cascade; +NOTICE: drop cascades to 9 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table a1 +drop cascades to table a2 +drop cascades to table subpartition_01 +drop cascades to table subpartition_02 +drop cascades to table subpartition_03 +drop cascades to table start_end1 +reset current_schema; diff --git a/contrib/dolphin/expected/tinyint_smp_join_procedure.out b/contrib/dolphin/expected/tinyint_smp_join_procedure.out index b7bf750e5..146b1fb6d 100644 --- a/contrib/dolphin/expected/tinyint_smp_join_procedure.out 
+++ b/contrib/dolphin/expected/tinyint_smp_join_procedure.out @@ -1,7 +1,5 @@ -drop database if exists tinyint_smp; -NOTICE: database "tinyint_smp" does not exist, skipping -create database tinyint_smp dbcompatibility 'b'; -\c tinyint_smp +create schema tinyint_smp; +set current_schema to 'tinyint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; drop table if exists t1; @@ -16,7 +14,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on tinyint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -26,15 +24,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a = 2) AND (tinyint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a > 500) AND (tinyint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -44,7 +42,7 @@ explain(costs off, verbose) select * from t2 where a = 2; [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Streaming(type: LOCAL GATHER dop: 1/4) Output: a - -> Seq Scan on public.t2 + -> Seq Scan on tinyint_smp.t2 Output: a Filter: (t2.a = 2) (6 rows) @@ -54,19 +52,19 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. Nested Loop Semi Join - Output: public.t2.a + Output: tinyint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a = 2) AND (tinyint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a + Output: tinyint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: tinyint_smp.t2.a + -> Seq Scan on tinyint_smp.t2 + Output: tinyint_smp.t2.a + Filter: ((tinyint_smp.t2.a > 500) AND (tinyint_smp.t2.a = 2)) (15 rows) set query_dop = 1; @@ -83,13 +81,13 @@ explain(costs off, verbose) select * from join_1 join join_2; Output: join_1.a, join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (12 rows) @@ -104,14 +102,14 @@ explain(costs off, verbose) select * from join_1 join join_2 on join_1.a = join_ -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) 
Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -126,14 +124,14 @@ explain(costs off, verbose) select * from join_1 left join join_2 on join_1.a = -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -148,14 +146,14 @@ explain(costs off, verbose) select * from join_1 right join join_2 on join_1.a = -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a -> Hash Output: join_1.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a (17 rows) @@ -170,14 +168,14 @@ explain(costs off, verbose) select * from join_1 inner join join_2 on join_1.a = -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -192,14 +190,14 @@ explain(costs off, verbose) select /*+ nestloop(join_1 join_2)*/ * from join_1 l -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> 
Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -214,14 +212,14 @@ explain(costs off, verbose) select /*+ hashjoin(join_1 join_2)*/ * from join_1 l -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -237,14 +235,14 @@ WARNING: unused hint: MergeJoin(join_1 join_2) -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_1.a Distribute Key: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on tinyint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL REDISTRIBUTE dop: 2/2) Output: join_2.a Distribute Key: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on tinyint_smp.join_2 Output: join_2.a (17 rows) @@ -275,5 +273,12 @@ select * from test1; -4 (2 rows) -\c postgres -drop database tinyint_smp; +drop schema tinyint_smp cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table join_1 +drop cascades to table join_2 +drop cascades to function test_p1(tinyint,tinyint) +drop cascades to table test1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_agg.out b/contrib/dolphin/expected/uint_agg.out index 68ef6d574..7bfe7558f 100644 --- a/contrib/dolphin/expected/uint_agg.out +++ b/contrib/dolphin/expected/uint_agg.out @@ -1,7 +1,5 @@ -drop database if exists uint_agg; -NOTICE: database "uint_agg" does not exist, skipping -create database uint_agg dbcompatibility 'b'; -\c uint_agg +create schema uint_agg; +set current_schema to 'uint_agg'; --uint1 create table u1(a uint1, b int2); insert into u1 values(null, null),(255, 255),(0, 0),(255, 255),(null, null); @@ -411,7 
+409,7 @@ explain(costs off, verbose) select avg(a), avg(b) from smp_test; Output: (avg(a)), (avg(b)) -> Aggregate Output: avg(a), avg(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -424,7 +422,7 @@ explain(costs off, verbose) select bit_and(a), bit_and(b) from smp_test; Output: (bit_and(a)), (bit_and(b)) -> Aggregate Output: bit_and(a), bit_and(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -437,20 +435,20 @@ explain(costs off, verbose) select bit_or(a), bit_or(b) from smp_test; Output: (bit_or(a)), (bit_or(b)) -> Aggregate Output: bit_or(a), bit_or(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) explain(costs off, verbose) select count(a), count(b) from smp_test; - QUERY PLAN ------------------------------------------------- + QUERY PLAN +------------------------------------------------- Aggregate Output: count((count(a))), count((count(b))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (count(a)), (count(b)) -> Aggregate Output: count(a), count(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -459,33 +457,33 @@ explain(costs off, verbose) select count(distinct a), count(distinct b) from smp ------------------------------------------------ Aggregate Output: count(DISTINCT a), count(DISTINCT b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (4 rows) explain(costs off, verbose) select max(a), max(b) from smp_test; - QUERY PLAN ------------------------------------------------ + QUERY PLAN +------------------------------------------------- Aggregate Output: max((max(a))), max((max(b))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (max(a)), (max(b)) -> Aggregate Output: max(a), max(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) explain(costs off, verbose) select min(a), min(b) from smp_test; - 
QUERY PLAN ------------------------------------------------ + QUERY PLAN +------------------------------------------------- Aggregate Output: min((min(a))), min((min(b))) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: (min(a)), (min(b)) -> Aggregate Output: min(a), min(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -498,7 +496,7 @@ explain(costs off, verbose) select stddev(a), stddev(b) from smp_test; Output: (stddev(a)), (stddev(b)) -> Aggregate Output: stddev(a), stddev(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -511,7 +509,7 @@ explain(costs off, verbose) select stddev_pop(a), stddev_pop(b) from smp_test; Output: (stddev_pop(a)), (stddev_pop(b)) -> Aggregate Output: stddev_pop(a), stddev_pop(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -524,7 +522,7 @@ explain(costs off, verbose) select stddev_samp(a), stddev_samp(b) from smp_test; Output: (stddev_samp(a)), (stddev_samp(b)) -> Aggregate Output: stddev_samp(a), stddev_samp(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -537,7 +535,7 @@ explain(costs off, verbose) select sum(a), sum(b) from smp_test; Output: (sum(a)), (sum(b)) -> Aggregate Output: sum(a), sum(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -550,7 +548,7 @@ explain(costs off, verbose) select var_pop(a), var_pop(b) from smp_test; Output: (var_pop(a)), (var_pop(b)) -> Aggregate Output: var_pop(a), var_pop(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -563,7 +561,7 @@ explain(costs off, verbose) select var_samp(a), var_samp(b) from smp_test; Output: (var_samp(a)), (var_samp(b)) -> Aggregate Output: var_samp(a), var_samp(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -576,7 +574,7 @@ explain(costs off, 
verbose) select variance(a), variance(b) from smp_test; Output: (variance(a)), (variance(b)) -> Aggregate Output: variance(a), variance(b) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (8 rows) @@ -585,7 +583,7 @@ explain(costs off, verbose) select listagg(a) within group(order by a) from smp_ -------------------------------------------------- Aggregate Output: listagg(a ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (4 rows) @@ -594,9 +592,15 @@ explain(costs off, verbose) select listagg(a, ',') within group(order by a) from ------------------------------------------------------------- Aggregate Output: listagg(a, ','::text ) WITHIN GROUP ( ORDER BY a) - -> Seq Scan on public.smp_test + -> Seq Scan on uint_agg.smp_test Output: a, b (4 rows) -\c postgres -drop database uint_agg; +drop schema uint_agg cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table u1 +drop cascades to table u2 +drop cascades to table u4 +drop cascades to table u8 +drop cascades to table smp_test +reset current_schema; diff --git a/contrib/dolphin/expected/uint_and.out b/contrib/dolphin/expected/uint_and.out index 487796f5d..04f5e9e3a 100644 --- a/contrib/dolphin/expected/uint_and.out +++ b/contrib/dolphin/expected/uint_and.out @@ -1,7 +1,5 @@ -drop database if exists uint_and; -NOTICE: database "uint_and" does not exist, skipping -create database uint_and dbcompatibility 'b'; -\c uint_and +create schema uint_and; +set current_schema to 'uint_and'; --uint8 select 18446744073709551615::uint8 & 0::int1; ?column? 
@@ -826,5 +824,5 @@ select 127::int1 & 1::uint8; 1 (1 row) -\c postgres -drop database uint_and +drop schema uint_and cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_auto_increment.out b/contrib/dolphin/expected/uint_auto_increment.out index 71ae501b5..270d85998 100644 --- a/contrib/dolphin/expected/uint_auto_increment.out +++ b/contrib/dolphin/expected/uint_auto_increment.out @@ -1,6 +1,5 @@ --- create b db -create database uint_auto_increment with dbcompatibility = 'B'; -\c uint_auto_increment +create schema uint_auto_increment; +set current_schema to 'uint_auto_increment'; -- test CREATE TABLE with AUTO_INCREMENT -- syntax error CREATE TABLE test_create_autoinc_err(id int unsigned auto_increment key, name varchar(200),a int unsigned); @@ -240,7 +239,7 @@ SELECT id, col FROM test_alter_autoinc ORDER BY 1, 2; SELECT pg_catalog.pg_get_tabledef('test_alter_autoinc'); pg_get_tabledef --------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE test_alter_autoinc ( + col uint4, + id uint4 AUTO_INCREMENT NOT NULL, + @@ -446,7 +445,7 @@ SELECT col FROM single_autoinc_pk ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('single_autoinc_pk'); pg_get_tabledef --------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_pk ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT single_autoinc_pk_pkey PRIMARY KEY (col)+ @@ -461,7 +460,7 @@ NOTICE: ALTER TABLE / ADD UNIQUE will create implicit index "test_alter_single_ SELECT pg_catalog.pg_get_tabledef('single_autoinc_pk'); pg_get_tabledef -------------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_pk ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT test_alter_single_autoinc_pk_u2 
UNIQUE (col) + @@ -475,7 +474,7 @@ ALTER TABLE single_autoinc_pk DROP CONSTRAINT test_alter_single_autoinc_pk_u2; SELECT pg_catalog.pg_get_tabledef('single_autoinc_pk'); pg_get_tabledef ----------------------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_pk ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT test_alter_single_autoinc_pk_u1 UNIQUE (col) + @@ -614,7 +613,7 @@ SELECT col FROM single_autoinc_uk ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('single_autoinc_uk'); pg_get_tabledef ------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE single_autoinc_uk ( + col uint4 AUTO_INCREMENT, + CONSTRAINT single_autoinc_uk_col_key UNIQUE (col)+ @@ -1607,7 +1606,7 @@ SELECT col FROM gtemp_single_autoinc ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('gtemp_single_autoinc'); pg_get_tabledef ---------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE GLOBAL TEMPORARY TABLE gtemp_single_autoinc ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT gtemp_single_autoinc_pkey PRIMARY KEY (col) + @@ -1873,7 +1872,7 @@ SELECT col FROM unlog_single_autoinc ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('unlog_single_autoinc'); pg_get_tabledef ------------------------------------------------------------ - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE UNLOGGED TABLE unlog_single_autoinc ( + col uint4 AUTO_INCREMENT NOT NULL, + CONSTRAINT unlog_single_autoinc_pkey PRIMARY KEY (col)+ @@ -2160,7 +2159,7 @@ SELECT col1 FROM test_part_autoinc_pk ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('test_part_autoinc_pk'); pg_get_tabledef ----------------------------------------------------------------------------------- - SET search_path = public; + + SET 
search_path = uint_auto_increment; + CREATE TABLE test_part_autoinc_pk ( + col1 integer AUTO_INCREMENT NOT NULL, + col2 integer NOT NULL, + @@ -2475,7 +2474,7 @@ SELECT col1 FROM test_part_autoinc_unique ORDER BY 1; SELECT pg_catalog.pg_get_tabledef('test_part_autoinc_unique'); pg_get_tabledef -------------------------------------------------------------------------------------- - SET search_path = public; + + SET search_path = uint_auto_increment; + CREATE TABLE test_part_autoinc_unique ( + col1 integer AUTO_INCREMENT, + col2 integer NOT NULL, + @@ -3134,5 +3133,6 @@ SELECT col1,col2 FROM test_autoinc_insert_select ORDER BY 1; drop table test_autoinc_source; drop table test_autoinc_insert_select; -\c postgres -drop database if exists uint_auto_increment; +drop schema uint_auto_increment cascade; +NOTICE: drop cascades to table uint_auto_increment.test_create_autoinc_err +reset current_schema; diff --git a/contrib/dolphin/expected/uint_cast.out b/contrib/dolphin/expected/uint_cast.out index 2f77ac890..caeac0fcc 100644 --- a/contrib/dolphin/expected/uint_cast.out +++ b/contrib/dolphin/expected/uint_cast.out @@ -1,7 +1,5 @@ -drop database if exists uint_cast; -NOTICE: database "uint_cast" does not exist, skipping -create database uint_cast dbcompatibility 'b'; -\c uint_cast +create schema uint_cast; +set current_schema to 'uint_cast'; select (-1)::bool::uint8; uint8 ------- @@ -1223,5 +1221,5 @@ select (1)::uint8::bool; t (1 row) -\c postgres -drop database uint_cast; +drop schema uint_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_cast2.out b/contrib/dolphin/expected/uint_cast2.out index 76beef401..1810ab11a 100644 --- a/contrib/dolphin/expected/uint_cast2.out +++ b/contrib/dolphin/expected/uint_cast2.out @@ -1,7 +1,5 @@ -drop database if exists uint_cast2; -NOTICE: database "uint_cast2" does not exist, skipping -create database uint_cast2 dbcompatibility 'b'; -\c uint_cast2 +create schema uint_cast2; +set current_schema to 
'uint_cast2'; drop table if exists t1 ; NOTICE: table "t1" does not exist, skipping create table t1(a uint8); @@ -451,5 +449,6 @@ insert into t1 values(255::uint1); insert into t1 values(256::uint1); ERROR: tinyint unsigned out of range CONTEXT: referenced column: a -\c postgres -drop database uint_cast2; +drop schema uint_cast2 cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_div.out b/contrib/dolphin/expected/uint_div.out index aa6b04902..a71da32ab 100644 --- a/contrib/dolphin/expected/uint_div.out +++ b/contrib/dolphin/expected/uint_div.out @@ -1,7 +1,5 @@ -drop database if exists uint_div; -NOTICE: database "uint_div" does not exist, skipping -create database uint_div dbcompatibility 'b'; -\c uint_div +create schema uint_div; +set current_schema to 'uint_div'; --uint8 select 18446744073709551615::uint8 / 0::int1; ?column? @@ -640,5 +638,5 @@ select 127::int1 / 1::uint8; 127 (1 row) -\c postgres -drop database uint_div +drop schema uint_div cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_ignore.out b/contrib/dolphin/expected/uint_ignore.out index 1d0723266..f8c266744 100644 --- a/contrib/dolphin/expected/uint_ignore.out +++ b/contrib/dolphin/expected/uint_ignore.out @@ -1,7 +1,5 @@ -drop database if exists uint_ignore; -NOTICE: database "uint_ignore" does not exist, skipping -create database uint_ignore dbcompatibility 'b'; -\c uint_ignore +create schema uint_ignore; +set current_schema to 'uint_ignore'; drop table if exists t1 ; NOTICE: table "t1" does not exist, skipping create table t1(a uint8); @@ -661,5 +659,6 @@ select * from t1; 255 (61 rows) -\c postgres -drop database uint_ignore; +drop schema uint_ignore cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_in.out b/contrib/dolphin/expected/uint_in.out index 503f8cbbc..f47b87c4e 100644 --- a/contrib/dolphin/expected/uint_in.out +++ 
b/contrib/dolphin/expected/uint_in.out @@ -1,7 +1,5 @@ -drop database if exists uint_in; -NOTICE: database "uint_in" does not exist, skipping -create database uint_in dbcompatibility 'b'; -\c uint_in +create schema uint_in; +set current_schema to 'uint_in'; create table t1(a uint1); create table t2(a uint2); create table t3(a uint4); @@ -62,5 +60,10 @@ select * from t4; 2 (2 rows) -\c postgres -drop database uint_in +drop schema uint_in cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table t4 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_index.out b/contrib/dolphin/expected/uint_index.out index b4fb77c69..caf06afe6 100644 --- a/contrib/dolphin/expected/uint_index.out +++ b/contrib/dolphin/expected/uint_index.out @@ -1,7 +1,5 @@ -drop database if exists uint_index; -NOTICE: database "uint_index" does not exist, skipping -create database uint_index dbcompatibility 'b'; -\c uint_index +create schema uint_index; +set current_schema to 'uint_index'; create table t1(a uint1); insert into t1 select generate_series(1, 255); insert into t1 select generate_series(1, 255); @@ -18,7 +16,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint1) -> Bitmap Index Scan on idx1 @@ -28,7 +26,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint1; explain(costs off, verbose)select * from t1 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint2) -> Bitmap Index Scan on idx1 @@ -38,7 +36,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint2; explain(costs off, verbose)select * from t1 where a = 1::uint4; QUERY PLAN 
----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint4) -> Bitmap Index Scan on idx1 @@ -48,7 +46,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint4; explain(costs off, verbose)select * from t1 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint8) -> Bitmap Index Scan on idx1 @@ -58,7 +56,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint8; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -68,7 +66,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -76,9 +74,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -88,7 +86,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -99,7 +97,7 @@ explain(costs off, verbose)select * from t1 where a > 
1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx1 on public.t1 + Index Only Scan using idx1 on uint_index.t1 Output: a Index Cond: ((t1.a > '1'::uint1) AND (t1.a < '3'::uint1)) (4 rows) @@ -107,7 +105,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint1 and a < 3::uint1; explain(costs off, verbose)select * from t1 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::uint2) AND (t1.a < '3'::uint2)) -> Bitmap Index Scan on idx1 @@ -117,7 +115,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint2 and a < 3::uint2; explain(costs off, verbose)select * from t1 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::uint4) AND (t1.a < '3'::uint4)) -> Bitmap Index Scan on idx1 @@ -127,7 +125,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint4 and a < 3::uint4; explain(costs off, verbose)select * from t1 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::uint8) AND (t1.a < '3'::uint8)) -> Bitmap Index Scan on idx1 @@ -137,7 +135,7 @@ explain(costs off, verbose)select * from t1 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > '1'::tinyint) AND (t1.a < '3'::tinyint)) -> Bitmap Index Scan on idx1 @@ 
-147,7 +145,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > 1::smallint) AND (t1.a < 3::smallint)) -> Bitmap Index Scan on idx1 @@ -157,7 +155,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > 1) AND (t1.a < 3)) -> Bitmap Index Scan on idx1 @@ -167,7 +165,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t1 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a > 1::bigint) AND (t1.a < 3::bigint)) -> Bitmap Index Scan on idx1 @@ -177,7 +175,7 @@ explain(costs off, verbose)select * from t1 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t1 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint1) AND (t1.a <= '3'::uint1)) -> Bitmap Index Scan on idx1 @@ -187,7 +185,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 
Output: a Recheck Cond: ((t1.a >= '1'::uint2) AND (t1.a <= '3'::uint2)) -> Bitmap Index Scan on idx1 @@ -197,7 +195,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint4) AND (t1.a <= '3'::uint4)) -> Bitmap Index Scan on idx1 @@ -207,7 +205,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::uint8) AND (t1.a <= '3'::uint8)) -> Bitmap Index Scan on idx1 @@ -217,7 +215,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t1 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= '1'::tinyint) AND (t1.a <= '3'::tinyint)) -> Bitmap Index Scan on idx1 @@ -227,7 +225,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t1 where a >= 1::int2 and a <= 3::int2; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= 1::smallint) AND (t1.a <= 3::smallint)) -> Bitmap Index Scan on idx1 @@ -237,7 +235,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t1 where a 
>= 1::int4 and a <= 3::int4; QUERY PLAN --------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= 1) AND (t1.a <= 3)) -> Bitmap Index Scan on idx1 @@ -247,7 +245,7 @@ explain(costs off, verbose)select * from t1 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t1 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: ((t1.a >= 1::bigint) AND (t1.a <= 3::bigint)) -> Bitmap Index Scan on idx1 @@ -260,7 +258,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint1) -> Bitmap Index Scan on idx1 @@ -270,7 +268,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint1; explain(costs off, verbose)select * from t1 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint2) -> Bitmap Index Scan on idx1 @@ -280,7 +278,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint2; explain(costs off, verbose)select * from t1 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint4) -> Bitmap Index Scan on idx1 @@ -290,7 +288,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint4; explain(costs off, verbose)select * from t1 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::uint8) -> Bitmap Index Scan on idx1 @@ -300,7 
+298,7 @@ explain(costs off, verbose)select * from t1 where a = 1::uint8; explain(costs off, verbose)select * from t1 where a = 1::int1; QUERY PLAN ------------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = '1'::tinyint) -> Bitmap Index Scan on idx1 @@ -310,7 +308,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int1; explain(costs off, verbose)select * from t1 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::smallint) -> Bitmap Index Scan on idx1 @@ -318,9 +316,9 @@ explain(costs off, verbose)select * from t1 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t1 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t1 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1) -> Bitmap Index Scan on idx1 @@ -330,7 +328,7 @@ explain(costs off, verbose)select * from t1 where a = 1::int4; explain(costs off, verbose)select * from t1 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t1 + Bitmap Heap Scan on uint_index.t1 Output: a Recheck Cond: (t1.a = 1::bigint) -> Bitmap Index Scan on idx1 @@ -353,7 +351,7 @@ analyze t2; explain(costs off, verbose)select * from t2 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -363,7 +361,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint1; explain(costs off, verbose)select * from t2 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 
'1'::uint2) -> Bitmap Index Scan on idx2 @@ -373,7 +371,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint2; explain(costs off, verbose)select * from t2 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint4) -> Bitmap Index Scan on idx2 @@ -383,7 +381,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint4; explain(costs off, verbose)select * from t2 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -393,7 +391,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint8; explain(costs off, verbose)select * from t2 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -403,7 +401,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int1; explain(costs off, verbose)select * from t2 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::smallint) -> Bitmap Index Scan on idx2 @@ -411,9 +409,9 @@ explain(costs off, verbose)select * from t2 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t2 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t2 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1) -> Bitmap Index Scan on idx2 @@ -423,7 +421,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int4; explain(costs off, verbose)select * from t2 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap 
Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -433,7 +431,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int8; explain(costs off, verbose)select * from t2 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > '1'::uint8) AND (t2.a < '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -444,7 +442,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx2 on public.t2 + Index Only Scan using idx2 on uint_index.t2 Output: a Index Cond: ((t2.a > '1'::uint2) AND (t2.a < '3'::uint2)) (4 rows) @@ -452,7 +450,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint2 and a < 3::uint2; explain(costs off, verbose)select * from t2 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > '1'::uint4) AND (t2.a < '3'::uint4)) -> Bitmap Index Scan on idx2 @@ -462,7 +460,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint4 and a < 3::uint4; explain(costs off, verbose)select * from t2 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > '1'::uint8) AND (t2.a < '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -472,7 +470,7 @@ explain(costs off, verbose)select * from t2 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t2 where a > 1::int1 and a < 3::int1; QUERY PLAN 
----------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1::bigint) AND (t2.a < 3::bigint)) -> Bitmap Index Scan on idx2 @@ -482,7 +480,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t2 where a > 1::int2 and a < 3::int2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1::smallint) AND (t2.a < 3::smallint)) -> Bitmap Index Scan on idx2 @@ -492,7 +490,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t2 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1) AND (t2.a < 3)) -> Bitmap Index Scan on idx2 @@ -502,7 +500,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t2 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a > 1::bigint) AND (t2.a < 3::bigint)) -> Bitmap Index Scan on idx2 @@ -512,7 +510,7 @@ explain(costs off, verbose)select * from t2 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t2 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint8) AND (t2.a <= '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -522,7 +520,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint1 and a <= 3::uint 
explain(costs off, verbose)select * from t2 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint2) AND (t2.a <= '3'::uint2)) -> Bitmap Index Scan on idx2 @@ -532,7 +530,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint4) AND (t2.a <= '3'::uint4)) -> Bitmap Index Scan on idx2 @@ -542,7 +540,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= '1'::uint8) AND (t2.a <= '3'::uint8)) -> Bitmap Index Scan on idx2 @@ -552,7 +550,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t2 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1::bigint) AND (t2.a <= 3::bigint)) -> Bitmap Index Scan on idx2 @@ -562,7 +560,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t2 where a >= 1::int2 and a <= 3::int2; QUERY PLAN ----------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 
1::smallint) AND (t2.a <= 3::smallint)) -> Bitmap Index Scan on idx2 @@ -572,7 +570,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t2 where a >= 1::int4 and a <= 3::int4; QUERY PLAN --------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1) AND (t2.a <= 3)) -> Bitmap Index Scan on idx2 @@ -582,7 +580,7 @@ explain(costs off, verbose)select * from t2 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t2 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: ((t2.a >= 1::bigint) AND (t2.a <= 3::bigint)) -> Bitmap Index Scan on idx2 @@ -595,7 +593,7 @@ analyze t2; explain(costs off, verbose)select * from t2 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -605,7 +603,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint1; explain(costs off, verbose)select * from t2 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint2) -> Bitmap Index Scan on idx2 @@ -615,7 +613,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint2; explain(costs off, verbose)select * from t2 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint4) -> Bitmap Index Scan on idx2 @@ -625,7 +623,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint4; explain(costs off, verbose)select * from t2 
where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = '1'::uint8) -> Bitmap Index Scan on idx2 @@ -635,7 +633,7 @@ explain(costs off, verbose)select * from t2 where a = 1::uint8; explain(costs off, verbose)select * from t2 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -645,7 +643,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int1; explain(costs off, verbose)select * from t2 where a = 1::int2; QUERY PLAN ------------------------------------------ - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::smallint) -> Bitmap Index Scan on idx2 @@ -653,9 +651,9 @@ explain(costs off, verbose)select * from t2 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t2 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t2 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1) -> Bitmap Index Scan on idx2 @@ -665,7 +663,7 @@ explain(costs off, verbose)select * from t2 where a = 1::int4; explain(costs off, verbose)select * from t2 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t2 + Bitmap Heap Scan on uint_index.t2 Output: a Recheck Cond: (t2.a = 1::bigint) -> Bitmap Index Scan on idx2 @@ -688,7 +686,7 @@ analyze t3; explain(costs off, verbose)select * from t3 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -698,7 +696,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint1; 
explain(costs off, verbose)select * from t3 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -708,7 +706,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint2; explain(costs off, verbose)select * from t3 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint4) -> Bitmap Index Scan on idx3 @@ -718,7 +716,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint4; explain(costs off, verbose)select * from t3 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -728,7 +726,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint8; explain(costs off, verbose)select * from t3 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -738,7 +736,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int1; explain(costs off, verbose)select * from t3 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -746,9 +744,9 @@ explain(costs off, verbose)select * from t3 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t3 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t3 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1) -> Bitmap Index Scan on 
idx3 @@ -758,7 +756,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int4; explain(costs off, verbose)select * from t3 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -768,7 +766,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int8; explain(costs off, verbose)select * from t3 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > '1'::uint8) AND (t3.a < '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -778,7 +776,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint1 and a < 3::uint1; explain(costs off, verbose)select * from t3 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > '1'::uint8) AND (t3.a < '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -789,7 +787,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx3 on public.t3 + Index Only Scan using idx3 on uint_index.t3 Output: a Index Cond: ((t3.a > '1'::uint4) AND (t3.a < '3'::uint4)) (4 rows) @@ -797,7 +795,7 @@ explain(costs off, verbose)select * from t3 where a > 1::uint4 and a < 3::uint4; explain(costs off, verbose)select * from t3 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > '1'::uint8) AND (t3.a < '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -807,7 +805,7 @@ explain(costs off, 
verbose)select * from t3 where a > 1::uint8 and a < 3::uint8; explain(costs off, verbose)select * from t3 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1::bigint) AND (t3.a < 3::bigint)) -> Bitmap Index Scan on idx3 @@ -817,7 +815,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t3 where a > 1::int2 and a < 3::int2; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1::bigint) AND (t3.a < 3::bigint)) -> Bitmap Index Scan on idx3 @@ -827,7 +825,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t3 where a > 1::int4 and a < 3::int4; QUERY PLAN ------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1) AND (t3.a < 3)) -> Bitmap Index Scan on idx3 @@ -837,7 +835,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t3 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a > 1::bigint) AND (t3.a < 3::bigint)) -> Bitmap Index Scan on idx3 @@ -847,7 +845,7 @@ explain(costs off, verbose)select * from t3 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t3 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint8) AND (t3.a 
<= '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -857,7 +855,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint8) AND (t3.a <= '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -867,7 +865,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint4) AND (t3.a <= '3'::uint4)) -> Bitmap Index Scan on idx3 @@ -877,7 +875,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= '1'::uint8) AND (t3.a <= '3'::uint8)) -> Bitmap Index Scan on idx3 @@ -887,7 +885,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t3 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1::bigint) AND (t3.a <= 3::bigint)) -> Bitmap Index Scan on idx3 @@ -897,7 +895,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t3 where a >= 1::int2 and a <= 3::int2; QUERY PLAN 
------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1::bigint) AND (t3.a <= 3::bigint)) -> Bitmap Index Scan on idx3 @@ -907,7 +905,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t3 where a >= 1::int4 and a <= 3::int4; QUERY PLAN --------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1) AND (t3.a <= 3)) -> Bitmap Index Scan on idx3 @@ -917,7 +915,7 @@ explain(costs off, verbose)select * from t3 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t3 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: ((t3.a >= 1::bigint) AND (t3.a <= 3::bigint)) -> Bitmap Index Scan on idx3 @@ -930,7 +928,7 @@ analyze t3; explain(costs off, verbose)select * from t3 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -940,7 +938,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint1; explain(costs off, verbose)select * from t3 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -950,7 +948,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint2; explain(costs off, verbose)select * from t3 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 
'1'::uint4) -> Bitmap Index Scan on idx3 @@ -960,7 +958,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint4; explain(costs off, verbose)select * from t3 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = '1'::uint8) -> Bitmap Index Scan on idx3 @@ -970,7 +968,7 @@ explain(costs off, verbose)select * from t3 where a = 1::uint8; explain(costs off, verbose)select * from t3 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -980,7 +978,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int1; explain(costs off, verbose)select * from t3 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -988,9 +986,9 @@ explain(costs off, verbose)select * from t3 where a = 1::int2; (5 rows) explain(costs off, verbose)select * from t3 where a = 1::int4; - QUERY PLAN ---------------------------------- - Bitmap Heap Scan on public.t3 + QUERY PLAN +----------------------------------- + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1) -> Bitmap Index Scan on idx3 @@ -1000,7 +998,7 @@ explain(costs off, verbose)select * from t3 where a = 1::int4; explain(costs off, verbose)select * from t3 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t3 + Bitmap Heap Scan on uint_index.t3 Output: a Recheck Cond: (t3.a = 1::bigint) -> Bitmap Index Scan on idx3 @@ -1023,7 +1021,7 @@ analyze t4; explain(costs off, verbose)select * from t4 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on 
uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1033,7 +1031,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint1; explain(costs off, verbose)select * from t4 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1043,7 +1041,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint2; explain(costs off, verbose)select * from t4 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1053,7 +1051,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint4; explain(costs off, verbose)select * from t4 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1063,7 +1061,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint8; explain(costs off, verbose)select * from t4 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1073,7 +1071,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int1; explain(costs off, verbose)select * from t4 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1083,7 +1081,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int2; explain(costs off, verbose)select * from t4 where a = 1::int4; QUERY PLAN ---------------------------------------- 
- Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1093,7 +1091,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int4; explain(costs off, verbose)select * from t4 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1104,7 +1102,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint1 and a < 3::uint1; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1113,7 +1111,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint2 and a < 3::uint2; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1122,7 +1120,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint4 and a < 3::uint4; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1131,7 +1129,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint8 and a < 3::uint8; QUERY PLAN ------------------------------------------------------------- [Bypass] - Index Only Scan using idx4 on public.t4 + Index Only Scan using idx4 on uint_index.t4 Output: a Index Cond: ((t4.a > '1'::uint8) AND (t4.a < '3'::uint8)) (4 rows) @@ -1139,7 +1137,7 @@ explain(costs off, verbose)select * from t4 where a > 1::uint8 and a < 3::uint8; explain(costs 
off, verbose)select * from t4 where a > 1::int1 and a < 3::int1; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1149,7 +1147,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int1 and a < 3::int1; explain(costs off, verbose)select * from t4 where a > 1::int2 and a < 3::int2; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1159,7 +1157,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int2 and a < 3::int2; explain(costs off, verbose)select * from t4 where a > 1::int4 and a < 3::int4; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1169,7 +1167,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int4 and a < 3::int4; explain(costs off, verbose)select * from t4 where a > 1::int8 and a < 3::int8; QUERY PLAN ----------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a > 1::bigint) AND (t4.a < 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1179,7 +1177,7 @@ explain(costs off, verbose)select * from t4 where a > 1::int8 and a < 3::int8; explain(costs off, verbose)select * from t4 where a >= 1::uint1 and a <= 3::uint1; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan 
on idx4 @@ -1189,7 +1187,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint1 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::uint2 and a <= 3::uint2; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1199,7 +1197,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint2 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::uint4 and a <= 3::uint4; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1209,7 +1207,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint4 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::uint8 and a <= 3::uint8; QUERY PLAN --------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= '1'::uint8) AND (t4.a <= '3'::uint8)) -> Bitmap Index Scan on idx4 @@ -1219,7 +1217,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::uint8 and a <= 3::uint explain(costs off, verbose)select * from t4 where a >= 1::int1 and a <= 3::int1; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1229,7 +1227,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::int1 and a <= 3::int1; explain(costs off, verbose)select * from t4 where a >= 1::int2 and a <= 3::int2; QUERY PLAN 
------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1239,7 +1237,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::int2 and a <= 3::int2; explain(costs off, verbose)select * from t4 where a >= 1::int4 and a <= 3::int4; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1249,7 +1247,7 @@ explain(costs off, verbose)select * from t4 where a >= 1::int4 and a <= 3::int4; explain(costs off, verbose)select * from t4 where a >= 1::int8 and a <= 3::int8; QUERY PLAN ------------------------------------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: ((t4.a >= 1::bigint) AND (t4.a <= 3::bigint)) -> Bitmap Index Scan on idx4 @@ -1262,7 +1260,7 @@ analyze t4; explain(costs off, verbose)select * from t4 where a = 1::uint1; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1272,7 +1270,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint1; explain(costs off, verbose)select * from t4 where a = 1::uint2; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1282,7 +1280,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint2; explain(costs off, verbose)select * from t4 where a = 1::uint4; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on 
uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1292,7 +1290,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint4; explain(costs off, verbose)select * from t4 where a = 1::uint8; QUERY PLAN ----------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = '1'::uint8) -> Bitmap Index Scan on idx4 @@ -1302,7 +1300,7 @@ explain(costs off, verbose)select * from t4 where a = 1::uint8; explain(costs off, verbose)select * from t4 where a = 1::int1; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1312,7 +1310,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int1; explain(costs off, verbose)select * from t4 where a = 1::int2; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1322,7 +1320,7 @@ explain(costs off, verbose)select * from t4 where a = 1::int2; explain(costs off, verbose)select * from t4 where a = 1::int4; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 @@ -1332,12 +1330,17 @@ explain(costs off, verbose)select * from t4 where a = 1::int4; explain(costs off, verbose)select * from t4 where a = 1::int8; QUERY PLAN ---------------------------------------- - Bitmap Heap Scan on public.t4 + Bitmap Heap Scan on uint_index.t4 Output: a Recheck Cond: (t4.a = 1::bigint) -> Bitmap Index Scan on idx4 Index Cond: (t4.a = 1::bigint) (5 rows) -\c postgres -drop database uint_index; +drop schema uint_index cascade; +NOTICE: drop cascades to 4 other objects +DETAIL: drop cascades to table t1 +drop cascades 
to table t2 +drop cascades to table t3 +drop cascades to table t4 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_join.out b/contrib/dolphin/expected/uint_join.out index 62691e8e1..961986b9b 100644 --- a/contrib/dolphin/expected/uint_join.out +++ b/contrib/dolphin/expected/uint_join.out @@ -1,7 +1,5 @@ -drop database if exists uint_join; -NOTICE: database "uint_join" does not exist, skipping -create database uint_join dbcompatibility 'b'; -\c uint_join +create schema uint_join; +set current_schema to 'uint_join'; create table t1(a int2, b uint2); create table t2(a uint4, b uint4); insert into t1 values(1, 1); @@ -103,5 +101,8 @@ select /*+ mergejoin(t1 t2)*/ * from t1 join t2; -1 | 1 | 3 | 1 (9 rows) -\c postgres -drop database uint_join; +drop schema uint_join cascade; +NOTICE: drop cascades to 2 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mi.out b/contrib/dolphin/expected/uint_mi.out index 0fa0b418a..5666918d1 100644 --- a/contrib/dolphin/expected/uint_mi.out +++ b/contrib/dolphin/expected/uint_mi.out @@ -1,7 +1,5 @@ -drop database if exists uint_mi; -NOTICE: database "uint_mi" does not exist, skipping -create database uint_mi dbcompatibility 'b'; -\c uint_mi +create schema uint_mi; +set current_schema to 'uint_mi'; --uint8 select 18446744073709551615::uint8 - 0::int1; ?column? 
@@ -772,5 +770,5 @@ select 0::int1 - 1::uint4; ERROR: int unsigned out of range select 0::int1 - 1::uint8; ERROR: bigint unsigned out of range -\c postgres -drop database uint_mi +drop schema uint_mi cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mod.out b/contrib/dolphin/expected/uint_mod.out index 2a4b86aa7..6d09e47c7 100644 --- a/contrib/dolphin/expected/uint_mod.out +++ b/contrib/dolphin/expected/uint_mod.out @@ -1,7 +1,5 @@ -drop database if exists uint_mod; -NOTICE: database "uint_mod" does not exist, skipping -create database uint_mod dbcompatibility 'b'; -\c uint_mod +create schema uint_mod; +set current_schema to 'uint_mod'; --uint8 select 18446744073709551615::uint8 % 0::int1; ?column? @@ -1000,5 +998,5 @@ select 127::int1 % 1::uint8; 0 (1 row) -\c postgres -drop database uint_mod +drop schema uint_mod cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mod2.out b/contrib/dolphin/expected/uint_mod2.out index 8adac953d..9d137cc3a 100644 --- a/contrib/dolphin/expected/uint_mod2.out +++ b/contrib/dolphin/expected/uint_mod2.out @@ -1,7 +1,5 @@ -drop database if exists uint_mod2; -NOTICE: database "uint_mod2" does not exist, skipping -create database uint_mod2 dbcompatibility 'b'; -\c uint_mod2 +create schema uint_mod2; +set current_schema to 'uint_mod2'; --uint8 select 18446744073709551615::uint8 mod 0::int1; b_mod @@ -1000,5 +998,5 @@ select 127::int1 mod 1::uint8; 0 (1 row) -\c postgres -drop database uint_mod2 +drop schema uint_mod2 cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_mul.out b/contrib/dolphin/expected/uint_mul.out index 636c606ce..0024bc6eb 100644 --- a/contrib/dolphin/expected/uint_mul.out +++ b/contrib/dolphin/expected/uint_mul.out @@ -1,7 +1,5 @@ -drop database if exists uint_mul; -NOTICE: database "uint_mul" does not exist, skipping -create database uint_mul dbcompatibility 'b'; -\c uint_mul +create schema uint_mul; +set current_schema to 'uint_mul'; --uint8 
select 18446744073709551615::uint8 * 0::int1; ?column? @@ -476,5 +474,5 @@ select 127::int1 * 1::uint8; 127 (1 row) -\c postgres -drop database uint_mul +drop schema uint_mul cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_numeric.out b/contrib/dolphin/expected/uint_numeric.out index de5f8b9cb..14faf565e 100644 --- a/contrib/dolphin/expected/uint_numeric.out +++ b/contrib/dolphin/expected/uint_numeric.out @@ -1,7 +1,5 @@ -drop database if exists uint_numeric; -NOTICE: database "uint_numeric" does not exist, skipping -create database uint_numeric dbcompatibility 'b'; -\c uint_numeric +create schema uint_numeric; +set current_schema to 'uint_numeric'; select (-1)::numeric::uint1; uint1 ------- @@ -323,5 +321,11 @@ CONTEXT: referenced column: a insert into t4 select c from num; ERROR: bigint unsigned out of range CONTEXT: referenced column: a -\c postgres -drop database uint_numeric; +drop schema uint_numeric cascade; +NOTICE: drop cascades to 5 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table t4 +drop cascades to table num +reset current_schema; diff --git a/contrib/dolphin/expected/uint_operator.out b/contrib/dolphin/expected/uint_operator.out index b10f4e8b1..cd4b59a88 100644 --- a/contrib/dolphin/expected/uint_operator.out +++ b/contrib/dolphin/expected/uint_operator.out @@ -1,7 +1,5 @@ -drop database if exists uint_operator; -NOTICE: database "uint_operator" does not exist, skipping -create database uint_operator dbcompatibility 'b'; -\c uint_operator +create schema uint_operator; +set current_schema to 'uint_operator'; -- > select 1::uint1 > 1::uint1; ?column? 
@@ -1622,5 +1620,5 @@ select ~0::uint8; 18446744073709551615 (1 row) -\c postgres -drop database uint_operator; +drop schema uint_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_or.out b/contrib/dolphin/expected/uint_or.out index 0951e4fef..ef6ff85e7 100644 --- a/contrib/dolphin/expected/uint_or.out +++ b/contrib/dolphin/expected/uint_or.out @@ -1,7 +1,5 @@ -drop database if exists uint_or; -NOTICE: database "uint_or" does not exist, skipping -create database uint_or dbcompatibility 'b'; -\c uint_or +create schema uint_or; +set current_schema to 'uint_or'; --uint8 select 18446744073709551615::uint8 | 0::int1; ?column? @@ -826,5 +824,5 @@ select 127::int1 | 1::uint8; 127 (1 row) -\c postgres -drop database uint_or +drop schema uint_or cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_partition.out b/contrib/dolphin/expected/uint_partition.out index 268403779..32c12030e 100644 --- a/contrib/dolphin/expected/uint_partition.out +++ b/contrib/dolphin/expected/uint_partition.out @@ -1,7 +1,5 @@ -drop database if exists uint_partition; -NOTICE: database "uint_partition" does not exist, skipping -create database uint_partition dbcompatibility 'b'; -\c uint_partition +create schema uint_partition; +set current_schema to 'uint_partition'; CREATE TABLE t1 ( col1 uint4 NOT NULL, @@ -255,5 +253,18 @@ insert into t_unsigned_0030_7 values(1); create table t_unsigned_0030_8(col01 bigint unsigned) partition by range(col01)(partition p start(1) end(255) every(50)); insert into t_unsigned_0030_8 values(1); -\c postgres -drop database uint_partition; +drop schema uint_partition cascade; +NOTICE: drop cascades to 12 other objects +DETAIL: drop cascades to table t1 +drop cascades to table t2 +drop cascades to table t3 +drop cascades to table a1 +drop cascades to table a2 +drop cascades to table subpartition_01 +drop cascades to table subpartition_02 +drop cascades to table subpartition_03 +drop cascades to table 
t_unsigned_0030_5 +drop cascades to table t_unsigned_0030_6 +drop cascades to table t_unsigned_0030_7 +drop cascades to table t_unsigned_0030_8 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_pl.out b/contrib/dolphin/expected/uint_pl.out index 8be8bfe6c..d6943bf9a 100644 --- a/contrib/dolphin/expected/uint_pl.out +++ b/contrib/dolphin/expected/uint_pl.out @@ -1,7 +1,5 @@ -drop database if exists uint_add; -NOTICE: database "uint_add" does not exist, skipping -create database uint_add dbcompatibility 'b'; -\c uint_add +create schema uint_add; +set current_schema to 'uint_add'; --uint8 select 18446744073709551615::uint8 + 0::int1; ?column? @@ -956,5 +954,5 @@ select 127::int1 + 4294967295::uint4; ERROR: int unsigned out of range select 127::int1 + 18446744073709551615::uint8; ERROR: bigint unsigned out of range -\c postgres -drop database uint_add +drop schema uint_add cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/uint_procedure_col_bypass.out b/contrib/dolphin/expected/uint_procedure_col_bypass.out index 22e73d6c8..034baab41 100644 --- a/contrib/dolphin/expected/uint_procedure_col_bypass.out +++ b/contrib/dolphin/expected/uint_procedure_col_bypass.out @@ -1,7 +1,5 @@ -drop database if exists uint_procedure_col; -NOTICE: database "uint_procedure_col" does not exist, skipping -create database uint_procedure_col dbcompatibility 'b'; -\c uint_procedure_col +create schema uint_procedure_col; +set current_schema to 'uint_procedure_col'; create procedure test_p1(uint2, uint4) SHIPPABLE VOLATILE as @@ -46,36 +44,40 @@ explain(costs off, verbose) insert into bypass values(1, 10); QUERY PLAN ----------------------------------------- [Bypass] - Insert on public.bypass + Insert on uint_procedure_col.bypass -> Result Output: '1'::uint2, '10'::uint4 (4 rows) explain(costs off, verbose) select b from bypass where a = 1; - QUERY PLAN ---------------------------- - Seq Scan on public.bypass + QUERY PLAN 
+--------------------------------------- + Seq Scan on uint_procedure_col.bypass Output: b Filter: (bypass.a = 1) (3 rows) explain(costs off, verbose) delete from bypass where b = 10; - QUERY PLAN ---------------------------------- - Delete on public.bypass - -> Seq Scan on public.bypass + QUERY PLAN +--------------------------------------------- + Delete on uint_procedure_col.bypass + -> Seq Scan on uint_procedure_col.bypass Output: ctid Filter: (bypass.b = 10) (4 rows) explain(costs off, verbose) update bypass set b = b + 1 where a = 1; - QUERY PLAN ----------------------------------- - Update on public.bypass - -> Seq Scan on public.bypass + QUERY PLAN +--------------------------------------------- + Update on uint_procedure_col.bypass + -> Seq Scan on uint_procedure_col.bypass Output: a, (b + 1), ctid Filter: (bypass.a = 1) (4 rows) -\c postgres -drop database uint_procedure_col; +drop schema uint_procedure_col cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to function test_p1(uint2,uint4) +drop cascades to table test1 +drop cascades to table bypass +reset current_schema; diff --git a/contrib/dolphin/expected/uint_smp.out b/contrib/dolphin/expected/uint_smp.out index 0c6bf9787..39470ac71 100644 --- a/contrib/dolphin/expected/uint_smp.out +++ b/contrib/dolphin/expected/uint_smp.out @@ -1,7 +1,5 @@ -drop database if exists uint_smp; -NOTICE: database "uint_smp" does not exist, skipping -create database uint_smp dbcompatibility 'b'; -\c uint_smp +create schema uint_smp; +set current_schema to 'uint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; drop table if exists t2 ; @@ -23,7 +21,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
- Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -33,15 +31,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -49,7 +47,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -59,15 +57,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 1; @@ -85,7 +83,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -95,15 +93,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -111,7 +109,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -121,19 +119,19 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a > 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a > 500)) -> Materialize - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a > 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a > 500) AND (uint_smp.t2.a = 2)) (15 rows) set query_dop = 1; @@ -145,7 +143,7 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (4 rows) @@ -155,15 +153,15 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a < 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a < 500)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500) AND (uint_smp.t2.a = 2)) (11 rows) set query_dop = 4; @@ -173,7 +171,7 @@ explain(costs off, verbose) select * from t2 where a = 2; [No Bypass]reason: Bypass not executed because query's scan operator is not index. Streaming(type: LOCAL GATHER dop: 1/4) Output: a - -> Seq Scan on public.t2 + -> Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2) (6 rows) @@ -183,19 +181,19 @@ explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a f ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2) AND (public.t2.a < 500)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2) AND (uint_smp.t2.a < 500)) -> Materialize - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500) AND (public.t2.a = 2)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500) AND (uint_smp.t2.a = 2)) (15 rows) set query_dop = 1; @@ -207,25 +205,25 @@ explain(costs off, verbose) select * from t2 where a = 2; QUERY PLAN ------------------------------------------------------------------------------------ [No Bypass]reason: Bypass not executed because query's scan operator is not index. - Seq Scan on public.t2 + Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2::bigint) (4 rows) explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a from t2 where a < 500); - QUERY PLAN ------------------------------------------------------------------------------------- + QUERY PLAN +--------------------------------------------------------------------------------------- [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2::bigint) AND (public.t2.a < 500::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2::bigint) AND (uint_smp.t2.a < 500::bigint)) -> Materialize - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500::bigint) AND (public.t2.a = 2::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500::bigint) AND (uint_smp.t2.a = 2::bigint)) (11 rows) set query_dop = 4; @@ -235,29 +233,29 @@ explain(costs off, verbose) select * from t2 where a = 2; [No Bypass]reason: Bypass not executed because query's scan operator is not index. Streaming(type: LOCAL GATHER dop: 1/4) Output: a - -> Seq Scan on public.t2 + -> Seq Scan on uint_smp.t2 Output: a Filter: (t2.a = 2::bigint) (6 rows) explain(costs off, verbose) select * from t2 where a = 2 and t2.a in (select a from t2 where a < 500); - QUERY PLAN ------------------------------------------------------------------------------------------ + QUERY PLAN +--------------------------------------------------------------------------------------------- [No Bypass]reason: Bypass not executed because query's scan operator is not index. 
Nested Loop Semi Join - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a = 2::bigint) AND (public.t2.a < 500::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a = 2::bigint) AND (uint_smp.t2.a < 500::bigint)) -> Materialize - Output: public.t2.a + Output: uint_smp.t2.a -> Streaming(type: LOCAL GATHER dop: 1/4) - Output: public.t2.a - -> Seq Scan on public.t2 - Output: public.t2.a - Filter: ((public.t2.a < 500::bigint) AND (public.t2.a = 2::bigint)) + Output: uint_smp.t2.a + -> Seq Scan on uint_smp.t2 + Output: uint_smp.t2.a + Filter: ((uint_smp.t2.a < 500::bigint) AND (uint_smp.t2.a = 2::bigint)) (15 rows) set query_dop = 1; @@ -274,13 +272,13 @@ explain(costs off, verbose) select * from join_1 join join_2; Output: join_1.a, join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (12 rows) @@ -292,13 +290,13 @@ explain(costs off, verbose) select * from join_1 join join_2 on join_1.a = join_ Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -310,13 +308,13 @@ explain(costs off, verbose) select * from join_1 left join join_2 on join_1.a = Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash 
Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -328,13 +326,13 @@ explain(costs off, verbose) select * from join_1 right join join_2 on join_1.a = Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -346,13 +344,13 @@ explain(costs off, verbose) select * from join_1 inner join join_2 on join_1.a = Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -364,13 +362,13 @@ explain(costs off, verbose) select /*+ nestloop(join_1 join_2)*/ * from join_1 l Join Filter: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Materialize Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -382,13 +380,13 @@ explain(costs off, verbose) select /*+ hashjoin(join_1 join_2)*/ * from join_1 l Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) @@ -401,15 +399,19 
@@ WARNING: unused hint: MergeJoin(join_1 join_2) Hash Cond: (join_1.a = join_2.a) -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_1.a - -> Seq Scan on public.join_1 + -> Seq Scan on uint_smp.join_1 Output: join_1.a -> Hash Output: join_2.a -> Streaming(type: LOCAL GATHER dop: 1/2) Output: join_2.a - -> Seq Scan on public.join_2 + -> Seq Scan on uint_smp.join_2 Output: join_2.a (13 rows) -\c postgres -drop database uint_smp; +drop schema uint_smp cascade; +NOTICE: drop cascades to 3 other objects +DETAIL: drop cascades to table t2 +drop cascades to table join_1 +drop cascades to table join_2 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_sql_mode.out b/contrib/dolphin/expected/uint_sql_mode.out index 799f2dfee..dd0c107f9 100644 --- a/contrib/dolphin/expected/uint_sql_mode.out +++ b/contrib/dolphin/expected/uint_sql_mode.out @@ -1,7 +1,5 @@ -drop database if exists uint_sql_mode; -NOTICE: database "uint_sql_mode" does not exist, skipping -create database uint_sql_mode dbcompatibility 'b'; -\c uint_sql_mode +create schema uint_sql_mode; +set current_schema to 'uint_sql_mode'; set dolphin.sql_mode = ''; select (-1)::bool::uint8; uint8 @@ -2210,5 +2208,6 @@ select * from t1; 255 (61 rows) -\c postgres -drop database uint_sql_mode; +drop schema uint_sql_mode cascade; +NOTICE: drop cascades to table t1 +reset current_schema; diff --git a/contrib/dolphin/expected/uint_xor.out b/contrib/dolphin/expected/uint_xor.out index 5941ad609..03bb8ad15 100644 --- a/contrib/dolphin/expected/uint_xor.out +++ b/contrib/dolphin/expected/uint_xor.out @@ -1,7 +1,5 @@ -drop database if exists uint_xor; -NOTICE: database "uint_xor" does not exist, skipping -create database uint_xor dbcompatibility 'b'; -\c uint_xor +create schema uint_xor; +set current_schema to 'uint_xor'; --uint8 select 18446744073709551615::uint8 # 0::int1; ?column? 
@@ -826,5 +824,5 @@ select 127::int1 # 1::uint8; 126 (1 row) -\c postgres -drop database uint_xor +drop schema uint_xor cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/upsert.out b/contrib/dolphin/expected/upsert.out index 45723f1e4..ba1aaae74 100644 --- a/contrib/dolphin/expected/upsert.out +++ b/contrib/dolphin/expected/upsert.out @@ -1,7 +1,5 @@ -drop database if exists upsert; -NOTICE: database "upsert" does not exist, skipping -create database upsert dbcompatibility 'b'; -\c upsert +create schema upsert; +set current_schema to 'upsert'; --normal test --primary key and unique on multiple column create table normal_01(c1 int, c2 int, c3 int, c4 int unique, c5 int primary key, unique(c2,c3)); @@ -805,5 +803,28 @@ select * from subpartition_03; 2 | 2 | 1 | 1 (1 row) -\c postgres -drop database upsert +drop schema upsert cascade; +NOTICE: drop cascades to 22 other objects +DETAIL: drop cascades to table normal_01 +drop cascades to table normal_02 +drop cascades to table normal_03 +drop cascades to table normal_04 +drop cascades to table ustore_01 +drop cascades to table ustore_02 +drop cascades to table ustore_03 +drop cascades to table ustore_04 +drop cascades to table t6 +drop cascades to table segment_01 +drop cascades to table segment_02 +drop cascades to table segment_03 +drop cascades to table segment_04 +drop cascades to table partition_01 +drop cascades to table partition_02 +drop cascades to table partition_03 +drop cascades to table partition_04 +drop cascades to table partition_05 +drop cascades to table partition_06 +drop cascades to table subpartition_01 +drop cascades to table subpartition_02 +drop cascades to table subpartition_03 +reset current_schema; diff --git a/contrib/dolphin/expected/use_dbname.out b/contrib/dolphin/expected/use_dbname.out index 619af9f7b..ce3dafd26 100644 --- a/contrib/dolphin/expected/use_dbname.out +++ b/contrib/dolphin/expected/use_dbname.out @@ -1,7 +1,5 @@ -drop database if exists use_dbname; 
-NOTICE: database "use_dbname" does not exist, skipping -create database use_dbname dbcompatibility 'b'; -\c use_dbname +create schema use_dbname; +set current_schema to 'use_dbname'; CREATE schema db1; CREATE schema db2; USE db1; @@ -35,5 +33,5 @@ select a from test; db1 (1 row) -\c postgres -drop database if exists use_dbname; +drop schema use_dbname cascade; +reset current_schema; diff --git a/contrib/dolphin/expected/vec_engine.out b/contrib/dolphin/expected/vec_engine.out index 3d104f1ab..7c0a8e7cd 100644 --- a/contrib/dolphin/expected/vec_engine.out +++ b/contrib/dolphin/expected/vec_engine.out @@ -1,5 +1,5 @@ -create database vec_engine_test dbcompatibility 'b'; -\c vec_engine_test +create schema vec_engine_test; +set current_schema to 'vec_engine_test'; CREATE TABLE customer ( c_custkey integer NOT NULL, c_name character varying(25) NOT NULL, @@ -128,5 +128,12 @@ explain (costs off) select Filter: (r_name = 'ASIA'::bpchar) (23 rows) -\c postgres -drop database vec_engine_test; +drop schema vec_engine_test cascade; +NOTICE: drop cascades to 6 other objects +DETAIL: drop cascades to table customer +drop cascades to table orders +drop cascades to table lineitem +drop cascades to table supplier +drop cascades to table nation +drop cascades to table region +reset current_schema; diff --git a/contrib/dolphin/expected/zerofill.out b/contrib/dolphin/expected/zerofill.out index 7450c909c..e25ca0ef5 100644 --- a/contrib/dolphin/expected/zerofill.out +++ b/contrib/dolphin/expected/zerofill.out @@ -1,7 +1,5 @@ -drop database if exists db_zerofill; -NOTICE: database "db_zerofill" does not exist, skipping -create database db_zerofill dbcompatibility 'b'; -\c db_zerofill +create schema db_zerofill; +set current_schema to 'db_zerofill'; create table t1_zerofill ( a int(5) zerofill, b integer(5) zerofill unsigned, @@ -14,7 +12,7 @@ create table t1_zerofill ( i int4 unsigned zerofill, j int8 zerofill); \d+ t1_zerofill - Table "public.t1_zerofill" + Table 
"db_zerofill.t1_zerofill" Column | Type | Modifiers | Storage | Stats target | Description --------+-------+-----------+---------+--------------+------------- a | uint4 | | plain | | @@ -39,5 +37,6 @@ create table t2_zerofill (a double precision zerofill); ERROR: syntax error at or near "zerofill" LINE 1: create table t2_zerofill (a double precision zerofill); ^ -\c postgres -drop database if exists db_zerofill; +drop schema db_zerofill cascade; +NOTICE: drop cascades to table t1_zerofill +reset current_schema; diff --git a/contrib/dolphin/parallel_schedule_dolphin b/contrib/dolphin/parallel_schedule_dolphin index cddfff106..1c6982c34 100644 --- a/contrib/dolphin/parallel_schedule_dolphin +++ b/contrib/dolphin/parallel_schedule_dolphin @@ -4,36 +4,27 @@ test: keyword_ignore_test/ignore_no_matched_partition keyword_ignore_test/ignore test: string_func_test/db_b_ascii_test string_func_test/db_b_left_right_test string_func_test/db_b_quote_test string_func_test/db_b_string_length_test string_func_test/db_b_substr_test string_func_test/db_b_trim_test string_func_test/db_b_insert_test -test: ast b_compatibility_time_type db_b_new_gram_test - -test: test_condition vec_engine - -test: group_concat_test +test: ast b_compatibility_time_type db_b_new_gram_test group_concat_test test_condition vec_engine test: db_b_parser1 db_b_parser2 db_b_parser3 db_b_parser4 second_microsecond -test: db_b_plpgsql_test default_guc describe explain_desc +test: db_b_plpgsql_test default_guc describe explain_desc kill set_password network test: empty_value_list empty_value_lists empty_value_support_value test: greatest_least join_without_on mysqlmode_fullgroup mysqlmode_strict mysqlmode_strict2 none_strict_warning test_alter_table -test: kill set_password network - # must be single test group, cause other connection will affect the result test: test_shows test: test_shows_1 test_shows_2 -test: test_shows_3 test_shows_4 - -test: test_shows_5 - -test: test_optimize +# must be single test group, 
cause other connection will affect the result +test: test_shows_3 -test: test_set_charset +test: test_shows_4 test_shows_5 -test: nvarchar regexp upsert zerofill +test: nvarchar regexp upsert zerofill test_set_charset test_optimize test: test_binary test_blob test_datatype test_fixed test_mysql_enum @@ -43,10 +34,6 @@ test: builtin_funcs/bin builtin_funcs/char builtin_funcs/char_length builtin_fun test: builtin_funcs/db_b_hex builtin_funcs/db_b_if builtin_funcs/elt builtin_funcs/field builtin_funcs/find_in_set builtin_funcs/soundex builtin_funcs/space builtin_funcs/make_set builtin_funcs/between builtin_funcs/not_between -test: revoke - -test: option - test: uint_agg uint_and uint_cast uint_cast2 uint_div uint_mi uint_mod uint_mod2 uint_mul uint_numeric uint_operator uint_or uint_partition uint_pl uint_xor test: uint_procedure_col_bypass uint_smp uint_index uint_sql_mode uint_ignore uint_join signed_unsigned_cast uint_in implicit_cast uint_auto_increment @@ -77,24 +64,14 @@ test: create_function_test/m_type_create_proc test: create_function_test/deterministic create_function_test/language_sql create_function_test/sql_options create_function_test/sql_security create_function_test/call_function create_function_test/commentsharp create_function_test/single_line_proc -test: b_do_statment +test: b_do_statment revoke option test_table_index test_float_double_real_double_precision_MD single_line_trigger prefixkey_index test: alter_function_test/alter_function alter_function_test/alter_procedure alter_function_test/language_sql replace_test/replace -test: network2 use_dbname show_create view_definer_test insert_set show_create_database show_variables +test: network2 use_dbname show_create view_definer_test insert_set show_create_database show_variables b_auto_increment test: partition_test1 partition_test2 partition_test3 partition_test4 partition_maxvalue_test -test: test_table_index - -test: test_float_double_real_double_precision_MD - -test: single_line_trigger - -test: 
db_b_date_time_functions - -test: prefixkey_index b_auto_increment - test: builtin_funcs/b_compatibility_time_funcs builtin_funcs/b_compatibility_time_funcs2 builtin_funcs/b_compatibility_time_funcs3 # case sensitive test, do not insert test case */ @@ -133,35 +110,22 @@ test: test_mysql_operator test_op_xor_unsignedint test_op_blob test_op_xor_boola test: json_array json_object json_quote json_contains json_contains_path json_extract json_unquote json_keys json_search json_array_append -test: db_b_date_time_functions2 -test: show b_comments - -test: ansi_quotes_start - -test: pl_debugger_server pl_debugger_client +test: show b_comments ansi_quotes_start -test: ansi_quotes_test +test: pl_debugger_server pl_debugger_client load load2 flush ansi_quotes_test -test: db_b_date_time_functions3 +test: db_b_date_time_functions db_b_date_time_functions2 db_b_date_time_functions3 db_b_date_time_functions4 test: json_array_insert json_insert json_merge_patch json_merge_preserve json_remove json_replace json_set json_depth -test: flush - test: oct string_func_test/db_b_from_base64_test string_func_test/test_substring_index string_func_test/db_b_ord_test -test: db_b_date_time_functions4 - -test: load load2 - test: like_default_test conv_cast_test read_only_guc_test test: string_func_test/db_b_to_base64_test string_func_test/db_b_unhex_test bit_count test_current_user test: test_schema connection_id test_system_user test_bit_xor -test: builtin_funcs/cast any_value_test - -test: default_function get_b_database +test: builtin_funcs/cast any_value_test default_function get_b_database test: json_type json_pretty json_valid json_length json_objectagg json_arrayagg json_operator json_storage_size diff --git a/contrib/dolphin/sql/alter_function_test/alter_function.sql b/contrib/dolphin/sql/alter_function_test/alter_function.sql index f346b962e..356373cc0 100755 --- a/contrib/dolphin/sql/alter_function_test/alter_function.sql +++ 
b/contrib/dolphin/sql/alter_function_test/alter_function.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_func_1; -create database db_alter_func_1 dbcompatibility 'B'; -\c db_alter_func_1 +create schema db_alter_func_1; +set current_schema to 'db_alter_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; @@ -103,5 +102,5 @@ select f5() ; select * from t1; -\c postgres -drop database db_alter_func_1; +drop schema db_alter_func_1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/alter_function_test/alter_procedure.sql b/contrib/dolphin/sql/alter_function_test/alter_procedure.sql index 33a9d2e87..912f17bd9 100755 --- a/contrib/dolphin/sql/alter_function_test/alter_procedure.sql +++ b/contrib/dolphin/sql/alter_function_test/alter_procedure.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_func_2; -create database db_alter_func_2 dbcompatibility 'B'; -\c db_alter_func_2 +create schema db_alter_func_2; +set current_schema to 'db_alter_func_2'; CREATE OR REPLACE PROCEDURE proc1() AS @@ -50,5 +49,5 @@ END; -- 修改不存在的存储过程 ALTER PROCEDURE proc2 READS SQL DATA; -\c postgres -drop database db_alter_func_2; +drop schema db_alter_func_2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/alter_function_test/language_sql.sql b/contrib/dolphin/sql/alter_function_test/language_sql.sql index e34993d5c..f5b92fa5d 100644 --- a/contrib/dolphin/sql/alter_function_test/language_sql.sql +++ b/contrib/dolphin/sql/alter_function_test/language_sql.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_func_sql; -create database db_alter_func_sql dbcompatibility 'B'; -\c db_alter_func_sql +create schema db_alter_func_sql; +set current_schema to 'db_alter_func_sql'; -- test func @@ -173,5 +172,5 @@ call pro_2(1,2,'a'); call pro_3(1,2,'a'); -\c postgres -drop database db_alter_func_sql; +drop schema db_alter_func_sql cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/ansi_quotes_start.sql 
b/contrib/dolphin/sql/ansi_quotes_start.sql index bde70e0aa..8855fe711 100644 --- a/contrib/dolphin/sql/ansi_quotes_start.sql +++ b/contrib/dolphin/sql/ansi_quotes_start.sql @@ -1,2 +1 @@ -CREATE DATABASE test_ansi_quotes DBCOMPATIBILITY 'B'; -\c test_ansi_quotes +create schema test_ansi_quotes; diff --git a/contrib/dolphin/sql/ansi_quotes_test.sql b/contrib/dolphin/sql/ansi_quotes_test.sql index e9a65a3b2..fbd53ccf0 100644 --- a/contrib/dolphin/sql/ansi_quotes_test.sql +++ b/contrib/dolphin/sql/ansi_quotes_test.sql @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; CREATE TABLE test_quotes (a text); show dolphin.sql_mode; @@ -38,5 +38,5 @@ select "test" != "test"; select * from test_quotes where a like "%test%"; select * from test_quotes where a = "test1"; -\c postgres -DROP DATABASE test_ansi_quotes; \ No newline at end of file +drop schema test_ansi_quotes cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/any_value_test.sql b/contrib/dolphin/sql/any_value_test.sql index be091aa1a..678619afe 100644 --- a/contrib/dolphin/sql/any_value_test.sql +++ b/contrib/dolphin/sql/any_value_test.sql @@ -1,6 +1,5 @@ -drop DATABASE if exists any_value_test; -CREATE DATABASE any_value_test dbcompatibility 'B'; -\c any_value_test; +create schema any_value_test; +set current_schema to 'any_value_test'; --test int type create table test_int1(a tinyint, b int); @@ -85,5 +84,5 @@ insert into test_blob_bytea values(2,'abcd',E'\\xeabc'); select any_value(b) from test_blob_bytea group by a; select any_value(c) from test_blob_bytea group by a; -\c postgres; -drop DATABASE if exists any_value_test; \ No newline at end of file +drop schema any_value_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/ast.sql b/contrib/dolphin/sql/ast.sql index 2d0abb4be..471d153e3 100644 --- a/contrib/dolphin/sql/ast.sql +++ b/contrib/dolphin/sql/ast.sql @@ -1,11 +1,10 @@ -drop database if 
exists ast_test; -create database ast_test dbcompatibility 'b'; -\c ast_test +create schema ast_test; +set current_schema to 'ast_test'; ast select * from test; ast create table test(id int); ast create table test(id int(5)); ast USE "custcomcenter"; ast select 1;select 1; ast select 1;ast select 1; -\c postgres -drop database ast_test; \ No newline at end of file +drop schema ast_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/b_comments.sql b/contrib/dolphin/sql/b_comments.sql index 0852f1fac..3233dfd4e 100644 --- a/contrib/dolphin/sql/b_comments.sql +++ b/contrib/dolphin/sql/b_comments.sql @@ -1,4 +1,6 @@ /* unsupported */ +create database b_comments dbcompatibility 'A'; +\c b_comments create schema b_comments; set search_path to 'b_comments'; create table test_row(a int not null comment 'test_row.a'); @@ -32,10 +34,9 @@ create procedure test_alter_procedure(int,int) as begin select $1 + $2;end; / alter procedure test_alter_procedure(int,int) comment 'test_alter_procedure'; drop schema b_comments cascade; +\c contrib_regression +drop database b_comments; - -create database b_comments dbcompatibility 'B'; -\c b_comments create schema b_comments; set search_path to 'b_comments'; /* unsupported */ @@ -177,6 +178,4 @@ from pg_description pd on pd.objoid = pc.oid where pc.relname = 'uq_0034'; drop schema b_comments cascade; -reset search_path; -\c postgres -drop database b_comments; \ No newline at end of file +reset search_path; \ No newline at end of file diff --git a/contrib/dolphin/sql/b_do_statment.sql b/contrib/dolphin/sql/b_do_statment.sql index ddd26ed17..170107f1f 100644 --- a/contrib/dolphin/sql/b_do_statment.sql +++ b/contrib/dolphin/sql/b_do_statment.sql @@ -1,5 +1,5 @@ -create database db_do_stmt dbcompatibility = 'B'; -\c db_do_stmt +create schema db_do_stmt; +set current_schema to 'db_do_stmt'; create table t1 (a int); insert into t1 values(1),(4),(7); @@ -49,5 +49,5 @@ LANGUAGE SQL; do sin(a) from t1; 
-\c regress -drop database db_do_stmt; +drop schema db_do_stmt cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/bit_count.sql b/contrib/dolphin/sql/bit_count.sql index ff4e2324c..ede31c8e3 100644 --- a/contrib/dolphin/sql/bit_count.sql +++ b/contrib/dolphin/sql/bit_count.sql @@ -1,6 +1,5 @@ -drop database if exists test_bit_count; -create database test_bit_count dbcompatibility 'b'; -\c test_bit_count +create schema test_bit_count; +set current_schema to 'test_bit_count'; -- 测试数字,字符串,二进制输入 SELECT bit_count(29); @@ -51,5 +50,5 @@ select bit_count(b'1111111111111111111111111111111111111111111111111111111111111 select bit_count(b'1111111111111111111111111111111111111111111111111111111111111111'); select bit_count(b'10000000111111111111111111111111111111111111111111111111111111111111111'); -\c postgres -drop database test_bit_count; \ No newline at end of file +drop schema test_bit_count cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql index b6c4fb0f0..1a76bdb38 100644 --- a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql +++ b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs.sql @@ -1,9 +1,5 @@ --- --- Test All Time function under 'b' compatibility --- -drop database if exists b_time_funcs; -create database b_time_funcs dbcompatibility 'b'; -\c b_time_funcs +create schema b_time_funcs; +set current_schema to 'b_time_funcs'; create table func_test(functionName varchar(256),result varchar(256)); truncate table func_test; @@ -243,5 +239,5 @@ insert into insert_subdate(date_col, datetime_col) values (subdate('2021-1-1', 1 drop table insert_subdate; select * from func_test; -\c postgres -drop database if exists b_time_funcs; +drop schema b_time_funcs cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql 
b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql index fa1c66d31..cbf124309 100644 --- a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql +++ b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs2.sql @@ -2,9 +2,8 @@ -- Test Time functions(Stage 2) under 'b' compatibility -- Contains subtime()、timediff()、time()、time_format()、timestamp()、timestampadd() -- -drop database if exists b_time_funcs2; -create database b_time_funcs2 dbcompatibility 'b'; -\c b_time_funcs2 +create schema b_time_funcs2; +set current_schema to 'b_time_funcs2'; create table func_test2(functionName varchar(256),result varchar(256)); truncate table func_test2; @@ -516,5 +515,5 @@ drop table t1; drop table t2; select * from func_test2; -\c postgres -drop database if exists b_time_funcs2; +drop schema b_time_funcs2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql index c66032810..0eb658af7 100644 --- a/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql +++ b/contrib/dolphin/sql/builtin_funcs/b_compatibility_time_funcs3.sql @@ -2,9 +2,8 @@ -- Test Time functions(Stage 3) under 'b' compatibility -- Contains to_days(), to_seconds(), unix_timestamp(), utc_date(), utc_time()、timestampadd() -- -drop database if exists b_time_funcs3; -create database b_time_funcs3 dbcompatibility 'b'; -\c b_time_funcs3 +create schema b_time_funcs3; +set current_schema to 'b_time_funcs3'; create table func_test3(functionName varchar(256),result varchar(256)); truncate table func_test3; @@ -189,5 +188,5 @@ insert into func_test3(functionName, result) values('UTC_TIMESTAMP(6)', UTC_TIME insert into func_test3(functionName, result) values('UTC_TIMESTAMP(-1)', UTC_TIMESTAMP(-1)); select * from func_test3; -\c postgres -drop database if exists b_time_funcs3; +drop schema b_time_funcs3 cascade; +reset current_schema; diff --git 
a/contrib/dolphin/sql/builtin_funcs/between.sql b/contrib/dolphin/sql/builtin_funcs/between.sql index a83fa3369..3bdd701f3 100644 --- a/contrib/dolphin/sql/builtin_funcs/between.sql +++ b/contrib/dolphin/sql/builtin_funcs/between.sql @@ -1,6 +1,5 @@ -drop database if exists db_between; -create database db_between dbcompatibility 'B'; -\c db_between +create schema db_between; +set current_schema to 'db_between'; select 2 between 2 and 23; select 2.1 between 2.1 and 12.3; select true between false and true; @@ -85,5 +84,5 @@ select * from t_between_and_0023; select distinct c_town from t_between_and_0023 where c_town between 'b' and 'n'; select distinct c_town from t_between_and_0023 where c_town between 'b' and 'nz'; drop table t_between_and_0023; -\c postgres -drop database if exists db_between; \ No newline at end of file +drop schema db_between cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/bin.sql b/contrib/dolphin/sql/builtin_funcs/bin.sql index 99aa2957f..b6cb0af9f 100644 --- a/contrib/dolphin/sql/builtin_funcs/bin.sql +++ b/contrib/dolphin/sql/builtin_funcs/bin.sql @@ -1,6 +1,5 @@ -drop database if exists db_bin; -create database db_bin dbcompatibility 'B'; -\c db_bin +create schema db_bin; +set current_schema to 'db_bin'; select bin(1); select bin(0); select bin('2'); @@ -11,5 +10,5 @@ select bin(null); select bin(true); select bin(false); select bin('测试'); -\c postgres -drop database if exists db_bin; +drop schema db_bin cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/cast.sql b/contrib/dolphin/sql/builtin_funcs/cast.sql index f46343b5d..dc462042c 100644 --- a/contrib/dolphin/sql/builtin_funcs/cast.sql +++ b/contrib/dolphin/sql/builtin_funcs/cast.sql @@ -1,6 +1,5 @@ -drop database if exists db_cast; -create database db_cast dbcompatibility 'B'; -\c db_cast +create schema db_cast; +set current_schema to 'db_cast'; select cast('$2'::money as unsigned); select 
cast(cast('$2'::money as unsigned) as money); @@ -10,5 +9,5 @@ select cast('2022-11-10 18:03:20'::timestamp as unsigned); select cast(current_timestamp::timestamp as unsigned); select cast(cast('2022-11-10 18:03:20'::timestamp as unsigned) as timestamp); -\c postgres -drop database if exists db_cast; +drop schema db_cast cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/char.sql b/contrib/dolphin/sql/builtin_funcs/char.sql index c00e8a6c1..8f92ffbe4 100644 --- a/contrib/dolphin/sql/builtin_funcs/char.sql +++ b/contrib/dolphin/sql/builtin_funcs/char.sql @@ -1,6 +1,5 @@ -drop database if exists db_char; -create database db_char dbcompatibility 'B'; -\c db_char +create schema db_char; +set current_schema to 'db_char'; select char(67,66,67); select char('65','66','67'); select char('A','B','C'); @@ -22,5 +21,5 @@ select char('hiu78','-156nfjl',175.99,'测试'); select char('侧四',-156,55.99,'ceshi'); select char('hi测试u158','ceshi',135.99,146); select char('hiu158','测试',125.99,146); -\c postgres -drop database if exists db_char; +drop schema db_char cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/char_length.sql b/contrib/dolphin/sql/builtin_funcs/char_length.sql index 9d1b4b2da..444beccc7 100644 --- a/contrib/dolphin/sql/builtin_funcs/char_length.sql +++ b/contrib/dolphin/sql/builtin_funcs/char_length.sql @@ -1,6 +1,5 @@ -drop database if exists db_char_length; -create database db_char_length dbcompatibility 'B'; -\c db_char_length +create schema db_char_length; +set current_schema to 'db_char_length'; select char_length(1234); select char_length(1234.5); @@ -18,8 +17,8 @@ select char_length('测试'); select char_length('测试123'); select char_length(true); -\c postgres -drop database if exists db_char_length; +drop schema db_char_length cascade; +reset current_schema; drop database if exists db_char_length_gbk; create database db_char_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 
'zh_CN.gbk'; \c db_char_length_gbk diff --git a/contrib/dolphin/sql/builtin_funcs/character_length.sql b/contrib/dolphin/sql/builtin_funcs/character_length.sql index 67019aef7..9c142fb65 100644 --- a/contrib/dolphin/sql/builtin_funcs/character_length.sql +++ b/contrib/dolphin/sql/builtin_funcs/character_length.sql @@ -1,6 +1,5 @@ -drop database if exists db_character_length; -create database db_character_length dbcompatibility 'B'; -\c db_character_length +create schema db_character_length; +set current_schema to 'db_character_length'; select character_length(1234); select character_length(1234.5); @@ -18,8 +17,8 @@ select character_length('测试'); select character_length('测试123.45'); select character_length(true); -\c postgres -drop database if exists db_character_length; +drop schema db_character_length cascade; +reset current_schema; drop database if exists db_character_length_gbk; create database db_character_length_gbk dbcompatibility 'B' encoding 'GBK' LC_CTYPE 'zh_CN.gbk' lc_collate 'zh_CN.gbk'; \c db_character_length_gbk diff --git a/contrib/dolphin/sql/builtin_funcs/conv.sql b/contrib/dolphin/sql/builtin_funcs/conv.sql index e70fc6f62..f8d09b247 100644 --- a/contrib/dolphin/sql/builtin_funcs/conv.sql +++ b/contrib/dolphin/sql/builtin_funcs/conv.sql @@ -1,6 +1,5 @@ -drop database if exists db_conv; -create database db_conv dbcompatibility 'B'; -\c db_conv +create schema db_conv; +set current_schema to 'db_conv'; select conv('a',16,2); select conv('6e',18,8); @@ -97,5 +96,5 @@ select conv(-9544646155975628532428411,10,-10); select conv(-9544646155975628532428411,-10,10); select conv(-9544646155975628532428411,-10,-10); -\c postgres -drop database if exists db_conv; +drop schema db_conv cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/convert.sql b/contrib/dolphin/sql/builtin_funcs/convert.sql index ffed2d6d9..c899480bc 100644 --- a/contrib/dolphin/sql/builtin_funcs/convert.sql +++ b/contrib/dolphin/sql/builtin_funcs/convert.sql @@ 
-1,6 +1,5 @@ -drop database if exists db_convert; -create database db_convert dbcompatibility 'B'; -\c db_convert +create schema db_convert; +set current_schema to 'db_convert'; select convert(1 using 'utf8'); select convert('1' using 'utf8'); select convert('a' using 'utf8'); @@ -14,5 +13,5 @@ select convert('测试' using 'utf8'); select convert('测试' using utf8); select convert(11.1, decimal(10,3)); select convert(1 using decimal(10,3)); -\c postgres -drop database if exists db_convert; +drop schema db_convert cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/crc32.sql b/contrib/dolphin/sql/builtin_funcs/crc32.sql index 7edbac123..3f814c446 100644 --- a/contrib/dolphin/sql/builtin_funcs/crc32.sql +++ b/contrib/dolphin/sql/builtin_funcs/crc32.sql @@ -1,6 +1,5 @@ -drop database if exists db_crc32; -create database db_crc32 dbcompatibility 'B'; -\c db_crc32 +create schema db_crc32; +set current_schema to 'db_crc32'; select crc32('abc'); select crc32(''); @@ -8,5 +7,5 @@ select crc32(1); select crc32(10),crc32(-3.1415926),crc32(1.339E5),crc32('ab57'),crc32('HAF47'); -\c postgres -drop database if exists db_crc32; +drop schema db_crc32 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/db_b_format.sql b/contrib/dolphin/sql/builtin_funcs/db_b_format.sql index 092ae8abf..8284272f0 100644 --- a/contrib/dolphin/sql/builtin_funcs/db_b_format.sql +++ b/contrib/dolphin/sql/builtin_funcs/db_b_format.sql @@ -1,6 +1,5 @@ -drop database if exists db_db_b_format; -create database db_db_b_format dbcompatibility 'B'; -\c db_db_b_format +create schema db_db_b_format; +set current_schema to 'db_db_b_format'; -- test for b_compatibility_mode = false select format(1234.456, 2); select format(1234.456, 2, 'en_US'); @@ -137,8 +136,8 @@ select format('%s, %s', variadic array[true, false]::text[]); select format('%2$s, %1$s', variadic array['first', 'second']); select format('%2$s, %1$s', variadic array[1, 2]); -\c postgres -drop 
database db_db_b_format; +drop schema db_db_b_format cascade; +reset current_schema; -- test for A compatibility to ensure the original functionality is good. create database db_db_b_format dbcompatibility 'A'; diff --git a/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql b/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql index 42636b6b5..94d41415b 100644 --- a/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql +++ b/contrib/dolphin/sql/builtin_funcs/db_b_hex.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_hex; -create database db_b_hex dbcompatibility 'B'; -\c db_b_hex +create schema db_b_hex; +set current_schema to 'db_b_hex'; select hex(int1(255)); select hex(int1(256)); @@ -56,5 +55,5 @@ create table bytea_to_hex_test(c1 bytea); insert into bytea_to_hex_test values (E'\\xDEADBEEF'); select hex(c1) from bytea_to_hex_test; -\c postgres -drop database if exists db_b_hex; +drop schema db_b_hex cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/db_b_if.sql b/contrib/dolphin/sql/builtin_funcs/db_b_if.sql index 278b90afb..f860e1ab8 100644 --- a/contrib/dolphin/sql/builtin_funcs/db_b_if.sql +++ b/contrib/dolphin/sql/builtin_funcs/db_b_if.sql @@ -1,6 +1,6 @@ -drop database if exists db_b_if; -create database db_b_if dbcompatibility 'B'; -\c db_b_if +create schema db_b_if; +set current_schema to 'db_b_if'; + select if(TRUE, 1, 2); select if(FALSE, 1, 2); @@ -77,5 +77,5 @@ select if (true, 1.1::float8, true) as a, if (false, 1.1::float8, true) as b; -- numeric to boolean select if (true, 2.2::numeric(10, 2), true) as a, if (false, 2.2::numeric(10, 2), true) as b; -\c postgres -drop database if exists db_b_if; \ No newline at end of file +drop schema db_b_if cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/elt.sql b/contrib/dolphin/sql/builtin_funcs/elt.sql index d424482fa..fcc3b5633 100644 --- a/contrib/dolphin/sql/builtin_funcs/elt.sql +++ b/contrib/dolphin/sql/builtin_funcs/elt.sql @@ -1,6 
+1,5 @@ -drop database if exists db_elt; -create database db_elt dbcompatibility 'B'; -\c db_elt +create schema db_elt; +set current_schema to 'db_elt'; select elt(1,1); select elt(-1,1); select elt(1.2,'a'); @@ -19,5 +18,5 @@ select elt('1',1); select elt('1',1.2); select elt(1,'a','b'); select elt(1,'a',2); -\c postgres -drop database if exists db_elt; +drop schema db_elt cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/field.sql b/contrib/dolphin/sql/builtin_funcs/field.sql index 323a95289..8496445c1 100644 --- a/contrib/dolphin/sql/builtin_funcs/field.sql +++ b/contrib/dolphin/sql/builtin_funcs/field.sql @@ -1,6 +1,5 @@ -drop database if exists db_field; -create database db_field dbcompatibility 'B'; -\c db_field +create schema db_field; +set current_schema to 'db_field'; select field(4,1,2,3,4); select field(2,1.1,2.1,3.1); @@ -18,5 +17,5 @@ select field('sdhfgs','dhgf', '2'); select field('sdhfgs',null,1,'dhgf', '2'); select field('测试',null,1,'dhgf', '2'); select field(' ','@',null,' ','',' '); -\c postgres -drop database if exists db_field; +drop schema db_field cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/find_in_set.sql b/contrib/dolphin/sql/builtin_funcs/find_in_set.sql index 24e647ed6..0be1793e1 100644 --- a/contrib/dolphin/sql/builtin_funcs/find_in_set.sql +++ b/contrib/dolphin/sql/builtin_funcs/find_in_set.sql @@ -1,6 +1,5 @@ -drop database if exists db_find_in_set; -create database db_find_in_set dbcompatibility 'B'; -\c db_find_in_set +create schema db_find_in_set; +set current_schema to 'db_find_in_set'; select find_in_set(1,'a,1,c'); select find_in_set(1,'true,1,c'); select find_in_set(1.2,'a,1.2,c'); @@ -16,5 +15,5 @@ select find_in_set('1','1,1.2,c,qwee,1212,1.1,12,qw'); select find_in_set(1,'1,1.2,c,qwee,1212,1.1,12,qw'); select find_in_set(1,'1.1,1.2,c,qwee,1212,1.1,12,1'); select find_in_set(1.1,'a,1.2,c,qwee,1212,1.1'); -\c postgres -drop database if exists db_find_in_set; +drop 
schema db_find_in_set cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/make_set.sql b/contrib/dolphin/sql/builtin_funcs/make_set.sql index c16ff7cf2..498dee672 100644 --- a/contrib/dolphin/sql/builtin_funcs/make_set.sql +++ b/contrib/dolphin/sql/builtin_funcs/make_set.sql @@ -1,6 +1,5 @@ -drop database if exists make_set; -create database make_set dbcompatibility 'b'; -\c make_set +create schema make_set; +set current_schema to 'make_set'; set dolphin.sql_mode = ''; select make_set(3, 'a', 'b', 'c'); select make_set(2,'a','b','c','d','e'); @@ -35,6 +34,6 @@ select make_set(-4294967296*1024*1024*1024,'1','2','3','4','5','6','7','8','9',' select make_set(3, true, false); select make_set(3,01/02/03, false, true, false); -\c postgres -drop database make_set +drop schema make_set cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/not_between.sql b/contrib/dolphin/sql/builtin_funcs/not_between.sql index 686b79c36..8c73ad3bb 100644 --- a/contrib/dolphin/sql/builtin_funcs/not_between.sql +++ b/contrib/dolphin/sql/builtin_funcs/not_between.sql @@ -1,6 +1,5 @@ -drop database if exists db_not_between; -create database db_not_between dbcompatibility 'B'; -\c db_not_between +create schema db_not_between; +set current_schema to 'db_not_between'; select 2 not between 2 and 23; select 2.1 not between 2.1 and 12.3; select true not between false and true; @@ -30,5 +29,5 @@ select b'1111111111111111111111111' not between 0 and 999999999; select 0 not between '测' and '15.2'; select 1 not between '测1' and '1'; select 1 not between '1测' and '1'; -\c postgres -drop database if exists db_not_between; \ No newline at end of file +drop schema db_not_between cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/builtin_funcs/soundex.sql b/contrib/dolphin/sql/builtin_funcs/soundex.sql index 84995e7ba..b1dfee3f6 100644 --- a/contrib/dolphin/sql/builtin_funcs/soundex.sql +++ 
b/contrib/dolphin/sql/builtin_funcs/soundex.sql @@ -1,6 +1,5 @@ -drop database if exists db_soundex; -create database db_soundex dbcompatibility 'B'; -\c db_soundex +create schema db_soundex; +set current_schema to 'db_soundex'; select soundex('abc'); select soundex(10); select soundex('afdsbfdlsafs'); @@ -42,5 +41,5 @@ select soundex('ш'); select soundex('я такая шчасліваяwjdkadskdjk'); select soundex('测T测h测试o测masёння я такая шчасліваяhello'); -\c postgres -drop database if exists db_soundex; +drop schema db_soundex cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/builtin_funcs/space.sql b/contrib/dolphin/sql/builtin_funcs/space.sql index 129339df7..c025fc16b 100644 --- a/contrib/dolphin/sql/builtin_funcs/space.sql +++ b/contrib/dolphin/sql/builtin_funcs/space.sql @@ -1,6 +1,5 @@ -drop database if exists db_space; -create database db_space dbcompatibility 'B'; -\c db_space +create schema db_space; +set current_schema to 'db_space'; select space('a'); select space(10); select space(-1); @@ -12,5 +11,5 @@ select space(true); select space(1,2,3); select space(2147483647111); select space(b'111'); -\c postgres -drop database if exists db_space; +drop schema db_space cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/connection_id.sql b/contrib/dolphin/sql/connection_id.sql index 704b05caa..ebcac8e8f 100644 --- a/contrib/dolphin/sql/connection_id.sql +++ b/contrib/dolphin/sql/connection_id.sql @@ -1,9 +1,8 @@ -drop database if exists test_connection_id; -create database test_connection_id dbcompatibility 'b'; -\c test_connection_id +create schema test_connection_id; +set current_schema to 'test_connection_id'; -- 测试返回连接的ID SELECT CONNECTION_ID(); -\c postgres -drop database test_connection_id; \ No newline at end of file +drop schema test_connection_id cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/conv_cast_test.sql b/contrib/dolphin/sql/conv_cast_test.sql index 1270eed67..578ae62f2 100755 --- 
a/contrib/dolphin/sql/conv_cast_test.sql +++ b/contrib/dolphin/sql/conv_cast_test.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists conv_cast_test; --- create database conv_cast_test dbcompatibility 'b'; -create database conv_cast_test with DBCOMPATIBILITY = 'B'; -\c conv_cast_test +create schema conv_cast_test; +set current_schema to 'conv_cast_test'; select conv(-211111111111111111111111111111111111111111111111111111111177777,10,8); select conv(-366666666666666666666666666666666666666, 10, 8); @@ -73,5 +70,5 @@ select cast(b'111111111111111111111111111111111111111111111111111111111111111' a select cast(b'11111111111111111111111111111111111111111111111111111111111111111' as unsigned); select cast(b'11111111111111111111111111111111111111111111111111111111111111111' as signed); -\c postgres -drop database conv_cast_test; +drop schema conv_cast_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/call_function.sql b/contrib/dolphin/sql/create_function_test/call_function.sql index 8b2a75336..4deeecf54 100755 --- a/contrib/dolphin/sql/create_function_test/call_function.sql +++ b/contrib/dolphin/sql/create_function_test/call_function.sql @@ -1,6 +1,6 @@ -drop database if exists db_func_call1; -create database db_func_call1 dbcompatibility 'B'; -\c db_func_call1 +create schema db_func_call1; +set current_schema to 'db_func_call1'; + CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; @@ -59,6 +59,6 @@ END; $$ LANGUAGE plpgsql; call f_3(); -\c postgres -drop database if exists db_func_call1; +drop schema db_func_call1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/commentsharp.sql b/contrib/dolphin/sql/create_function_test/commentsharp.sql index 328c5dc3c..656796a92 100644 --- a/contrib/dolphin/sql/create_function_test/commentsharp.sql +++ b/contrib/dolphin/sql/create_function_test/commentsharp.sql @@ -1,6 +1,5 @@ -drop database if exists 
db_comment_sharp; -create database db_comment_sharp dbcompatibility 'B'; -\c db_comment_sharp +create schema db_comment_sharp; +set current_schema to 'db_comment_sharp'; create table t1(a int,b int); @@ -155,6 +154,5 @@ drop table t_test2; drop table t_test3; -\c postgres - -drop database if exists db_comment_sharp; +drop schema db_comment_sharp cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/deterministic.sql b/contrib/dolphin/sql/create_function_test/deterministic.sql index 21dd80c3c..5f3611981 100755 --- a/contrib/dolphin/sql/create_function_test/deterministic.sql +++ b/contrib/dolphin/sql/create_function_test/deterministic.sql @@ -1,6 +1,5 @@ -drop database if exists db_func_1; -create database db_func_1 dbcompatibility 'B'; -\c db_func_1 +create schema db_func_1; +set current_schema to 'db_func_1'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int NOT DETERMINISTIC AS $$ select 1 $$ ; @@ -22,6 +21,6 @@ call f3(3); call f4(4); -\c postgres -drop database if exists db_func_1; +drop schema db_func_1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/language_sql.sql b/contrib/dolphin/sql/create_function_test/language_sql.sql index a6d8ae949..fac8993ab 100755 --- a/contrib/dolphin/sql/create_function_test/language_sql.sql +++ b/contrib/dolphin/sql/create_function_test/language_sql.sql @@ -1,6 +1,5 @@ -drop database if exists db_func_2; -create database db_func_2 dbcompatibility 'B'; -\c db_func_2 +create schema db_func_2; +set current_schema to 'db_func_2'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int AS $$ select 1 $$ ; @@ -144,5 +143,5 @@ call f4(4); -\c postgres -drop database if exists db_func_2; +drop schema db_func_2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql b/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql index 6700eed41..45192a0bb 100644 --- a/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql 
+++ b/contrib/dolphin/sql/create_function_test/m_type_create_proc.sql @@ -1,6 +1,5 @@ -drop database if exists m_create_proc_type; -create database m_create_proc_type dbcompatibility 'B'; -\c m_create_proc_type +create schema m_create_proc_type; +set current_schema to 'm_create_proc_type'; --test create procedure delimiter // @@ -275,7 +274,6 @@ call doempty(); -\c postgres - -drop database m_create_proc_type; +drop schema m_create_proc_type cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/single_line_proc.sql b/contrib/dolphin/sql/create_function_test/single_line_proc.sql index 3e81d412a..9f54cdda1 100644 --- a/contrib/dolphin/sql/create_function_test/single_line_proc.sql +++ b/contrib/dolphin/sql/create_function_test/single_line_proc.sql @@ -1,7 +1,5 @@ - -drop database if exists db_func_call_2; -create database db_func_call_2 dbcompatibility 'B'; -\c db_func_call_2 +create schema db_func_call_2; +set current_schema to 'db_func_call_2'; create table t1 (a int); @@ -151,7 +149,6 @@ create procedure proc33 () select z from tz; -\c regress - -drop database db_func_call_2; +drop schema db_func_call_2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/sql_options.sql b/contrib/dolphin/sql/create_function_test/sql_options.sql index d19ccb1a3..f35d33612 100755 --- a/contrib/dolphin/sql/create_function_test/sql_options.sql +++ b/contrib/dolphin/sql/create_function_test/sql_options.sql @@ -1,7 +1,5 @@ - -drop database if exists db_func_3; -create database db_func_3 dbcompatibility 'B'; -\c db_func_3 +create schema db_func_3; +set current_schema to 'db_func_3'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int CONTAINS SQL AS $$ select 1 $$ ; @@ -76,6 +74,6 @@ call f3(3); call f4(4); -\c postgres -drop database if exists db_func_3; +drop schema db_func_3 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/create_function_test/sql_security.sql 
b/contrib/dolphin/sql/create_function_test/sql_security.sql index a57d52c77..aa2d97100 100755 --- a/contrib/dolphin/sql/create_function_test/sql_security.sql +++ b/contrib/dolphin/sql/create_function_test/sql_security.sql @@ -1,6 +1,5 @@ -drop database if exists db_func_4; -create database db_func_4 dbcompatibility 'B'; -\c db_func_4 +create schema db_func_4; +set current_schema to 'db_func_4'; CREATE FUNCTION f1 (s CHAR(20)) RETURNS int SQL SECURITY DEFINER AS $$ select 1 $$ ; @@ -22,5 +21,5 @@ call f3(3); call f4(4); -\c postgres -drop database if exists db_func_4; +drop schema db_func_4 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/db_b_date_time_functions.sql b/contrib/dolphin/sql/db_b_date_time_functions.sql index f7de7ea41..983e9e21e 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test1; +set current_schema to 'b_datetime_func_test1'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; -- test part-one function @@ -155,5 +153,5 @@ insert into test_datetime values(sysdate(0)); insert into test_datetime values(sysdate(6)); select * from test_datetime; drop table test_datetime; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test1 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_date_time_functions2.sql b/contrib/dolphin/sql/db_b_date_time_functions2.sql index c6e398298..88100d7ea 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions2.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions2.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c 
b_datetime_func_test +create schema b_datetime_func_test2; +set current_schema to 'b_datetime_func_test2'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -612,5 +610,5 @@ insert into test values('yearweek(''0000-12-31 22:59:59.9999995'', 0)', yearweek -- 结果 select * from test order by funcname; drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_date_time_functions3.sql b/contrib/dolphin/sql/db_b_date_time_functions3.sql index 858b88750..fe8113231 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions3.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions3.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test3; +set current_schema to 'b_datetime_func_test3'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -478,5 +476,5 @@ insert into test values('addtime(''10000-1-1 00:00:00'', ''00:00:00'')', addtime select * from test order by funcname; drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_date_time_functions4.sql b/contrib/dolphin/sql/db_b_date_time_functions4.sql index 8124955fd..cb802756e 100644 --- a/contrib/dolphin/sql/db_b_date_time_functions4.sql +++ b/contrib/dolphin/sql/db_b_date_time_functions4.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists b_datetime_func_test; -create database b_datetime_func_test dbcompatibility 'b'; -\c b_datetime_func_test +create schema b_datetime_func_test4; +set 
current_schema to 'b_datetime_func_test4'; set datestyle = 'ISO,ymd'; set time zone "Asia/Shanghai"; create table test(funcname text, result text); @@ -376,5 +374,5 @@ insert into test values('str_to_date(''200454 Monday'', ''%X%V %W'')', str_to_da -- 结果 select * from test order by funcname; drop table test; -\c contrib_regression -DROP DATABASE b_datetime_func_test; \ No newline at end of file +drop schema b_datetime_func_test4 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_new_gram_test.sql b/contrib/dolphin/sql/db_b_new_gram_test.sql index 65f1d728f..c2dcedef4 100644 --- a/contrib/dolphin/sql/db_b_new_gram_test.sql +++ b/contrib/dolphin/sql/db_b_new_gram_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_new_gram_test; -create database db_b_new_gram_test dbcompatibility 'B'; -\c db_b_new_gram_test +create schema db_b_new_gram_test; +set current_schema to 'db_b_new_gram_test'; -- CREATE TABLE engine test CREATE TABLE test_engine_1 (a int) engine = InnoDB; @@ -389,9 +388,8 @@ SELECT COUNT(*) FROM t_ctas_new; DROP TABLE t_ctas_new; DROP TABLE t_ctas; -drop database if exists test_m; -create database test_m dbcompatibility 'b'; -\c test_m +create schema test_m; +set current_schema to 'test_m'; create table test_unique( f1 int, f2 int, @@ -470,6 +468,6 @@ select * from ignore_range_range partition (p_201901, p_201905_a); select * from ignore_range_range partition (p_201901, p_201905_b); drop table ignore_range_range; -\c postgres -drop database if exists test_m; -drop database db_b_new_gram_test; +drop schema test_m cascade; +drop schema db_b_new_gram_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/db_b_parser1.sql b/contrib/dolphin/sql/db_b_parser1.sql index c5244c9fe..2787daba8 100644 --- a/contrib/dolphin/sql/db_b_parser1.sql +++ b/contrib/dolphin/sql/db_b_parser1.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser1; -create database db_b_parser1 dbcompatibility 'b'; -\c db_b_parser1 
+create schema db_b_parser1; +set current_schema to 'db_b_parser1'; select 'bbbbb' regexp '^([bc])\1*$' as t, 'bbbbb' not regexp '^([bc])\1*$' as t2, 'bbbbb' rlike '^([bc])\1*$' as t; select 'ccc' regexp '^([bc])\1*$' as t, 'ccc' not regexp '^([bc])\1*$' as t2, 'ccc' rlike '^([bc])\1*$' as t; select 'xxx' regexp '^([bc])\1*$' as f, 'xxx' not regexp '^([bc])\1*$' as f2, 'xxx' rlike '^([bc])\1*$' as f; @@ -64,5 +63,5 @@ select '-12.3abc' rlike true; select '-12.3abc' rlike false; select '-12.3abc' rlike 'null'; -\c postgres -drop database if exists db_b_parser1; \ No newline at end of file +drop schema db_b_parser1 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_parser2.sql b/contrib/dolphin/sql/db_b_parser2.sql index e1727aec2..78d462fd0 100644 --- a/contrib/dolphin/sql/db_b_parser2.sql +++ b/contrib/dolphin/sql/db_b_parser2.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser2; -create database db_b_parser2 dbcompatibility 'b'; -\c db_b_parser2 +create schema db_b_parser2; +set current_schema to 'db_b_parser2'; --验证DAYOFMONTH() DAYOFWEEK() DAYOFYEAR() HOUR() MICROSECOND() MINUTE() QUARTER() SECOND() WEEKDAY() WEEKOFYEAR() YEAR() select DAYOFMONTH(datetime '2021-11-4 16:30:44.341191'); @@ -159,5 +158,5 @@ insert into fchar_test values('零一二三四五六七八九十'); select fchar,length(fchar) from fchar_test order by 1,2; drop table fchar_test; -\c postgres -drop database if exists db_b_parser2; \ No newline at end of file +drop schema db_b_parser2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_parser3.sql b/contrib/dolphin/sql/db_b_parser3.sql index 3f2bcbcc6..6788c2444 100644 --- a/contrib/dolphin/sql/db_b_parser3.sql +++ b/contrib/dolphin/sql/db_b_parser3.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser3; -create database db_b_parser3 dbcompatibility 'b'; -\c db_b_parser3 +create schema db_b_parser3; +set current_schema to 'db_b_parser3'; --测试点一:验证lcase函数 select 
lcase('ABc'), lcase('哈哈'), lcase('123456'),lcase('哈市&%%¥#'),lcase(null); @@ -118,5 +117,5 @@ select acos(11); select acos(1.000001); select acos(-1.000001); -\c postgres -drop database if exists db_b_parser3; \ No newline at end of file +drop schema db_b_parser3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_parser4.sql b/contrib/dolphin/sql/db_b_parser4.sql index 8bdd7a6c9..ded92180b 100644 --- a/contrib/dolphin/sql/db_b_parser4.sql +++ b/contrib/dolphin/sql/db_b_parser4.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_parser4; -create database db_b_parser4 dbcompatibility 'b'; -\c db_b_parser4 +create schema db_b_parser4; +set current_schema to 'db_b_parser4'; --验证text类型 drop table if exists tb_db_b_parser_0001; create table tb_db_b_parser_0001(a text(10),b tinytext,c mediumtext,d longtext); @@ -30,5 +29,5 @@ drop table if exists tb_default_double; drop table if exists tb_real_float; -\c postgres -drop database if exists db_b_parser4; \ No newline at end of file +drop schema db_b_parser4 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_plpgsql_test.sql b/contrib/dolphin/sql/db_b_plpgsql_test.sql index 209788734..ec0a980ab 100644 --- a/contrib/dolphin/sql/db_b_plpgsql_test.sql +++ b/contrib/dolphin/sql/db_b_plpgsql_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_plpgsql_test; -create database db_b_plpgsql_test dbcompatibility 'b'; -\c db_b_plpgsql_test +create schema db_b_plpgsql_test; +set current_schema to 'db_b_plpgsql_test'; create table tb_b_grammar_0038(a text(10)) engine = 表1; @@ -44,5 +43,5 @@ end; SELECT * from proc_01(); SELECT * from tb_b_grammar_0038; -\c postgres -drop database if exists db_b_plpgsql_test; \ No newline at end of file +drop schema db_b_plpgsql_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/db_b_rename_user_test.sql b/contrib/dolphin/sql/db_b_rename_user_test.sql index 
1334cbba6..82e532ff8 100644 --- a/contrib/dolphin/sql/db_b_rename_user_test.sql +++ b/contrib/dolphin/sql/db_b_rename_user_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_rename_user_test; -create database db_b_rename_user_test dbcompatibility 'b'; -\c db_b_rename_user_test +create schema db_b_rename_user_test; +set current_schema to 'db_b_rename_user_test'; CREATE USER user1 WITH ENCRYPTED PASSWORD 'user1@1234'; CREATE USER user2 WITH ENCRYPTED PASSWORD 'user2@1234'; @@ -35,5 +34,5 @@ drop user user4; drop user user5; drop user user6; -\c postgres -drop database if exists db_b_rename_user_test; \ No newline at end of file +drop schema db_b_rename_user_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/default_guc.sql b/contrib/dolphin/sql/default_guc.sql index 1fa1aae46..9213fb5d3 100644 --- a/contrib/dolphin/sql/default_guc.sql +++ b/contrib/dolphin/sql/default_guc.sql @@ -1,6 +1,5 @@ -drop database if exists default_guc; -create database default_guc dbcompatibility 'b'; -\c default_guc +create schema default_guc; +set current_schema to 'default_guc'; show behavior_compat_options; select 0.123; @@ -13,5 +12,5 @@ select 0.123; select md5(0.123); select md5('0.123'); -\c postgres -drop database if exists default_guc; \ No newline at end of file +drop schema default_guc cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/describe.sql b/contrib/dolphin/sql/describe.sql index 3cfed17de..e1234a4be 100644 --- a/contrib/dolphin/sql/describe.sql +++ b/contrib/dolphin/sql/describe.sql @@ -1,6 +1,5 @@ -drop database if exists db_describe; -create database db_describe dbcompatibility 'b'; -\c db_describe +create schema db_describe; +set current_schema to 'db_describe'; CREATE TABLE test2 ( id int PRIMARY KEY @@ -51,9 +50,9 @@ primary key (a) ); desc test; describe test; -desc public.test; +desc db_describe.test; desc sc.test; desc public.test4; desc sc.test4; -\c postgres -drop 
database if exists db_describe; \ No newline at end of file +drop schema db_describe cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/empty_value_lists.sql b/contrib/dolphin/sql/empty_value_lists.sql index a7fb2799d..4f5dd8d40 100644 --- a/contrib/dolphin/sql/empty_value_lists.sql +++ b/contrib/dolphin/sql/empty_value_lists.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists empty_value_lists; -create database empty_value_lists dbcompatibility 'b'; - -\c empty_value_lists +create schema empty_value_lists; +set current_schema to 'empty_value_lists'; create table test1(num int); create table test2(num int default 3); @@ -191,5 +188,5 @@ select * from m3; insert into m4 values(),(); select * from m4; -\c postgres -drop database if exists empty_value_lists; +drop schema empty_value_lists cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/empty_value_support_value.sql b/contrib/dolphin/sql/empty_value_support_value.sql index 3317e5899..d0e2fb915 100644 --- a/contrib/dolphin/sql/empty_value_support_value.sql +++ b/contrib/dolphin/sql/empty_value_support_value.sql @@ -1,7 +1,5 @@ -drop database if exists empty_value_support_value; -create database empty_value_support_value dbcompatibility 'b'; - -\c empty_value_support_value +create schema empty_value_support_value; +set current_schema to 'empty_value_support_value'; create table test1(num int not null); insert into test1 value(); insert into test1 value(),(); @@ -11,5 +9,5 @@ select * from test1; insert into test1 value(),(); select * from test1; -\c postgres -drop database if exists empty_value_support_value; \ No newline at end of file +drop schema empty_value_support_value cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/explain_desc.sql b/contrib/dolphin/sql/explain_desc.sql index e6a676134..e13527590 100644 --- a/contrib/dolphin/sql/explain_desc.sql +++ b/contrib/dolphin/sql/explain_desc.sql @@ 
-1,12 +1,12 @@ -create database db_explain_desc with dbcompatibility 'B'; -\c db_explain_desc +create schema db_explain_desc; +set current_schema to 'db_explain_desc'; create table ed_t(c1 int, c2 varchar(100), c3 int default 10); insert into ed_t values(generate_series(1, 10), 'hello', 100); -- 1.use explain to query table's info explain ed_t; -explain public.ed_t; +explain db_explain_desc.ed_t; -- 2.use desc to query plan info desc select c1, c2, c3 from ed_t; @@ -44,6 +44,5 @@ explain format='TraDitional' delete from ed_t where c1 < 5; drop table ed_t; -\c postgres - -drop database db_explain_desc; +drop schema db_explain_desc cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/export_set.sql b/contrib/dolphin/sql/export_set.sql index 5031a2af8..01706a27b 100644 --- a/contrib/dolphin/sql/export_set.sql +++ b/contrib/dolphin/sql/export_set.sql @@ -1,6 +1,5 @@ -drop database if exists export_set; -create database export_set dbcompatibility 'b'; -\c export_set +create schema export_set; +set current_schema to 'export_set'; -- 测试缺省值 SELECT EXPORT_SET(5,'Y','N',',',5); @@ -33,5 +32,5 @@ SELECT EXPORT_SET(5,'YYYYYYYYYYYYYYYY','N',',',5); SELECT EXPORT_SET(5,'Y','NNNNNNNNNNNNNNN',',',5); SELECT EXPORT_SET(5,'Y','N',',,,,,,,,,,,,',5); -\c postgres -drop database if exists export_set; \ No newline at end of file +drop schema export_set cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql b/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql index 35579f8c0..a1357df2e 100644 --- a/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql +++ b/contrib/dolphin/sql/float_numeric_test/db_b_float_round_test.sql @@ -1,4 +1,7 @@ -- the test case of A format +drop database if exists test; +create database test dbcompatibility 'A'; +\c test SELECT 2.5::float4::int1; SELECT 2.5::float8::int1; SELECT 2.5::int1; @@ -28,10 +31,12 @@ SELECT 3.5::int8; SELECT 
dround(2.5); SELECT dround(3.5); +\c contrib_regression +drop database test; + -- the test case of dolphin plugin -drop database if exists test; -create database test dbcompatibility 'B'; -\c test +create schema test; +set current_schema to 'test'; SELECT 2.5::float4::int1; SELECT 2.5::float8::int1; @@ -62,5 +67,5 @@ SELECT 3.5::int8; SELECT dround(2.5); SELECT dround(3.5); -\c postgres -drop database test; +drop schema test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql b/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql index 40b303537..14fab25c5 100644 --- a/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql +++ b/contrib/dolphin/sql/float_numeric_test/db_b_log_test.sql @@ -1,8 +1,15 @@ -SELECT LOG(10); drop database if exists db_b_log_test; -create database db_b_log_test dbcompatibility 'B'; +create database db_b_log_test dbcompatibility 'A'; \c db_b_log_test +SELECT LOG(10); + +\c contrib_regression +drop database db_b_log_test; + +create schema db_b_log_test; +set current_schema to 'db_b_log_test'; + SELECT LOG(10); SELECT LOG10(100); SELECT LOG2(64); @@ -47,5 +54,5 @@ select log(b'111'::int); select log2(b'111'::int); select log10(b'111'::int); -\c postgres -drop database db_b_log_test; +drop schema db_b_log_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql b/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql index a78b7ec7e..444604ae4 100644 --- a/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql +++ b/contrib/dolphin/sql/float_numeric_test/db_b_sqrt_test.sql @@ -1,9 +1,15 @@ +drop database if exists db_b_sqrt_test; +create database db_b_sqrt_test dbcompatibility 'A'; +\c db_b_sqrt_test + SELECT SQRT(64); SELECT SQRT(-64); -drop database if exists db_b_sqrt_test; -create database db_b_sqrt_test dbcompatibility 'B'; -\c db_b_sqrt_test +\c contrib_regression +drop database db_b_sqrt_test; + +create schema db_b_sqrt_test; 
+set current_schema to 'db_b_sqrt_test'; SELECT SQRT(64); SELECT SQRT(-64); @@ -16,5 +22,5 @@ select sqrt(b'111'); select sqrt(7); select sqrt(b'111'::int); -\c postgres -drop database db_b_sqrt_test; +drop schema db_b_sqrt_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/flush.sql b/contrib/dolphin/sql/flush.sql index df630c453..ee5a8023e 100644 --- a/contrib/dolphin/sql/flush.sql +++ b/contrib/dolphin/sql/flush.sql @@ -1,6 +1,5 @@ -drop database if exists db_flush; -create database db_flush dbcompatibility 'b'; -\c db_flush +create schema db_flush; +set current_schema to 'db_flush'; FLUSH BINARY LOGS; -\c postgres -drop database if exists db_flush; +drop schema db_flush cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/get_b_database.sql b/contrib/dolphin/sql/get_b_database.sql index d40ba04d4..6cc8f4cb4 100644 --- a/contrib/dolphin/sql/get_b_database.sql +++ b/contrib/dolphin/sql/get_b_database.sql @@ -1,6 +1,5 @@ -drop database if exists get_db; -create database get_db dbcompatibility 'b'; -\c get_db +create schema get_db; +set current_schema to 'get_db'; select database(); create schema testdb; use testdb; @@ -9,5 +8,5 @@ create schema testdb1; select database(); use testdb1; select database(); -\c postgres -drop database if exists get_db; +drop schema get_db cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/greatest_least.sql b/contrib/dolphin/sql/greatest_least.sql index f9633f1cc..2dd043c0f 100644 --- a/contrib/dolphin/sql/greatest_least.sql +++ b/contrib/dolphin/sql/greatest_least.sql @@ -1,6 +1,5 @@ -drop database if exists greatest_least; -create database greatest_least dbcompatibility 'b'; -\c greatest_least +create schema greatest_least; +set current_schema to 'greatest_least'; --return null if input include null select GREATEST(null,1,2), GREATEST(null,1,2) is null; select GREATEST(1,2); @@ -8,5 +7,5 @@ select GREATEST(1,2); select LEAST(null,1,2), LEAST(null,1,2) is null; select LEAST(1,2); -\c 
postgres -drop database if exists greatest_least; +drop schema greatest_least cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/group_concat_test.sql b/contrib/dolphin/sql/group_concat_test.sql index 2cf9db19b..3887d46c9 100644 --- a/contrib/dolphin/sql/group_concat_test.sql +++ b/contrib/dolphin/sql/group_concat_test.sql @@ -1,5 +1,5 @@ -create database t dbcompatibility 'B'; -\c t; +create schema t; +set current_schema to 't'; create table t(id text, v text); insert into t(id, v) values('1','a'),('2','b'),('1','c'),('2','d'); @@ -10,5 +10,5 @@ select * from tmp_table; set explain_perf_mode=pretty; explain verbose select id, group_concat(VARIADIC ARRAY[id,':',v] order by id) as title from t group by id; -\c postgres -drop database t; +drop schema t cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/if_not_exists_test.sql b/contrib/dolphin/sql/if_not_exists_test.sql index 359ca7a18..41cd131b6 100644 --- a/contrib/dolphin/sql/if_not_exists_test.sql +++ b/contrib/dolphin/sql/if_not_exists_test.sql @@ -1,6 +1,5 @@ -drop database if exists test_if_not_exists; -create database test_if_not_exists dbcompatibility 'B'; -\c test_if_not_exists +create schema test_if_not_exists; +set current_schema to 'test_if_not_exists'; CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; CREATE USER ZZZ WITH PASSWORD 'openGauss@123'; @@ -8,5 +7,5 @@ CREATE USER IF NOT EXISTS ZZZ WITH PASSWORD 'openGauss@123'; DROP USER ZZZ; CREATE USER IF NOT EXISTS ZZZ WITH PASSWORD 'openGauss@123'; -\c postgres -drop database test_if_not_exists; +drop schema test_if_not_exists cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/implicit_cast.sql b/contrib/dolphin/sql/implicit_cast.sql index 9ec07b4d3..b179db7ff 100644 --- a/contrib/dolphin/sql/implicit_cast.sql +++ b/contrib/dolphin/sql/implicit_cast.sql @@ -1,6 +1,5 @@ -drop database if exists implicit_cast; -create database implicit_cast dbcompatibility 'b'; -\c implicit_cast +create schema implicit_cast; +set 
current_schema to 'implicit_cast'; select 1::int1 % 1::float4; select 1::int2 % 1::float4; @@ -62,5 +61,5 @@ select 1::int2 | 1::text; select 1::int4 | 1::text; select 1::int8 | 1::text; -\c postgres -drop database if exists implicit_cast; \ No newline at end of file +drop schema implicit_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/insert_set.sql b/contrib/dolphin/sql/insert_set.sql index b921a0466..b58257333 100644 --- a/contrib/dolphin/sql/insert_set.sql +++ b/contrib/dolphin/sql/insert_set.sql @@ -1,6 +1,5 @@ -drop database if exists insert_set; -create database insert_set dbcompatibility 'B'; -\c insert_set +create schema insert_set; +set current_schema to 'insert_set'; create table test_figure(tinyint tinyint, smallint smallint, integer integer, binary_integer binary_integer, bigint bigint); insert into test_figure set bigint = 7234134, binary_integer = 1011101, integer = 10000, smallint = 1, tinyint = 3; @@ -40,5 +39,5 @@ select * from test_error; insert into test_error set name = 23; select * from test_error; -\c postgres -drop database insert_set; +drop schema insert_set cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/join_without_on.sql b/contrib/dolphin/sql/join_without_on.sql index 9a00342a0..37a78999f 100644 --- a/contrib/dolphin/sql/join_without_on.sql +++ b/contrib/dolphin/sql/join_without_on.sql @@ -1,6 +1,5 @@ -drop database if exists join_without_on; -create database join_without_on dbcompatibility 'b'; -\c join_without_on +create schema join_without_on; +set current_schema to 'join_without_on'; CREATE TABLE J1_TBL ( i integer, @@ -61,5 +60,5 @@ SELECT * FROM J1_TBL JOIN J2_TBL JOIN J3_TBL ON J1_TBL.i = J3_TBL.i; SELECT * FROM J1_TBL JOIN J2_TBL JOIN J3_TBL JOIN J4_TBL ON J1_TBL.i = J4_TBL.i; SELECT * FROM J1_TBL JOIN J2_TBL INNER JOIN J3_TBL INNER JOIN J4_TBL ON J1_TBL.i = J4_TBL.i; -\c postgres -drop database if exists join_without_on; \ No newline at end of file +drop schema 
join_without_on cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_array.sql b/contrib/dolphin/sql/json_array.sql index 0b9dc13a3..f794eec1a 100644 --- a/contrib/dolphin/sql/json_array.sql +++ b/contrib/dolphin/sql/json_array.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_array; -create database test_json_array dbcompatibility 'B'; -\c test_json_array +create schema test_json_array; +set current_schema to 'test_json_array'; select json_array(1,2,3,4); select json_array(1,3,4); @@ -19,5 +18,5 @@ insert into dataa(name) value(json_array('sjy')); select name from dataa; drop table dataa; -\c postgres -drop database if exists test_json_array; \ No newline at end of file +drop schema test_json_array cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_array_append.sql b/contrib/dolphin/sql/json_array_append.sql index 1486501f6..92635fa1f 100644 --- a/contrib/dolphin/sql/json_array_append.sql +++ b/contrib/dolphin/sql/json_array_append.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_array_append; -create database test_json_array_append dbcompatibility'B'; -\c test_json_array_append +create schema test_json_array_append; +set current_schema to 'test_json_array_append'; select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[1]', 4); select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[0]', 4); @@ -37,5 +36,5 @@ select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[*]', 4); select JSON_ARRAY_APPEND('[1, [2, 3]]', '$[*]', 日); select JSON_ARRAY_APPEND('[1, [2, 3]]', ' ', 4); -\c postgres -drop database if exists test_json_array_append; \ No newline at end of file +drop schema test_json_array_append cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_array_insert.sql b/contrib/dolphin/sql/json_array_insert.sql index a8440e389..361eda9f3 100644 --- a/contrib/dolphin/sql/json_array_insert.sql +++ b/contrib/dolphin/sql/json_array_insert.sql @@ -1,6 +1,5 @@ -drop 
database if exists test_json_array_insert; -create database test_json_array_insert dbcompatibility 'B'; -\c test_json_array_insert +create schema test_json_array_insert; +set current_schema to 'test_json_array_insert'; SELECT JSON_ARRAY_INSERT('[1, [2, 3], {"a": [4, 5]}]', '$[0]', 0); SELECT JSON_ARRAY_INSERT('[1, [2, 3], {"a": [4, 5]}]', '$[2]', 4); @@ -45,5 +44,5 @@ SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', '$..1', 4); SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', '$[*]', 4); SELECT JSON_ARRAY_INSERT('[1, [2, 3]]', ' ', 4); -\c postgres -drop database if exists test_json_array_insert; \ No newline at end of file +drop schema test_json_array_insert cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_arrayagg.sql b/contrib/dolphin/sql/json_arrayagg.sql index 8a575ead4..163cbbd44 100644 --- a/contrib/dolphin/sql/json_arrayagg.sql +++ b/contrib/dolphin/sql/json_arrayagg.sql @@ -1,6 +1,5 @@ -drop database if exists json_arrayagg_test; -create database json_arrayagg_test dbcompatibility 'B'; -\c json_arrayagg_test +create schema json_arrayagg_test; +set current_schema to 'json_arrayagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -48,5 +47,5 @@ insert into time_table values(20221204, 3); select json_arrayagg(b) from time_table; select json_arrayagg(a) from time_table; -\c postgres -drop database json_arrayagg_test; +drop schema json_arrayagg_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_contains.sql b/contrib/dolphin/sql/json_contains.sql index f55312267..32ff372f0 100644 --- a/contrib/dolphin/sql/json_contains.sql +++ b/contrib/dolphin/sql/json_contains.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_contains; -create database test_json_contains dbcompatibility 'b'; -\c test_json_contains
select json_contains('1',null); select json_contains(null,'1'); @@ -87,5 +86,5 @@ insert into json_contains_test values('[1,2,3,4]','[2,4]','$'); select *, json_contains(target, candidate, path) from json_contains_test; drop table json_contains_test; -\c postgres; -drop database if exists test_json_contains; \ No newline at end of file +drop schema test_json_contains cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_contains_path.sql b/contrib/dolphin/sql/json_contains_path.sql index e41aed801..b40af56f2 100644 --- a/contrib/dolphin/sql/json_contains_path.sql +++ b/contrib/dolphin/sql/json_contains_path.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_contains_path; -create database test_json_contains_path dbcompatibility 'b'; -\c test_json_contains_path +create schema test_json_contains_path; +set current_schema to 'test_json_contains_path'; select json_contains_path(null,'one','$[0]'); select json_contains_path('[1,2,3]',null,'$[0]'); @@ -40,5 +39,5 @@ insert into json_contains_path_test values('{"a": 1, "b": 2, "c": {"d": 4}}', 'a select *, json_contains_path(target, mode, '$.a.d', '$.c.d') from json_contains_path_test; drop table json_contains_path_test; -\c postgres; -drop database if exists test_json_contains_path; \ No newline at end of file +drop schema test_json_contains_path cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_depth.sql b/contrib/dolphin/sql/json_depth.sql index b6ef54c91..63b2cb4cf 100644 --- a/contrib/dolphin/sql/json_depth.sql +++ b/contrib/dolphin/sql/json_depth.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_depth; -create database test_json_depth dbcompatibility 'B'; -\c test_json_depth +create schema test_json_depth; +set current_schema to 'test_json_depth'; select json_depth('{}'); select json_depth('[]'); @@ -32,5 +31,5 @@ select *from test1; select json_depth(data) from test1; drop table test1; -\c postgres -drop database if 
exists test_json_depth; \ No newline at end of file +drop schema test_json_depth cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_extract.sql b/contrib/dolphin/sql/json_extract.sql index 92708000d..6a4b30d9f 100644 --- a/contrib/dolphin/sql/json_extract.sql +++ b/contrib/dolphin/sql/json_extract.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_extract; -create database test_json_extract dbcompatibility'B'; -\c test_json_extract +create schema test_json_extract; +set current_schema to 'test_json_extract'; select json_extract('{"a": "lihua"}', '$.a'); select json_extract('{"a"}', '$.a'); @@ -31,5 +30,5 @@ insert into test values (json_extract('{"a": 43, "b": {"c": true}}', '$.b')); select * from test; -\c postgres -drop database if exists test_json_extract; \ No newline at end of file +drop schema test_json_extract cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_insert.sql b/contrib/dolphin/sql/json_insert.sql index bdd23b53e..d29879ef7 100644 --- a/contrib/dolphin/sql/json_insert.sql +++ b/contrib/dolphin/sql/json_insert.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_insert; -create database test_json_insert dbcompatibility'B'; -\c test_json_insert +create schema test_json_insert; +set current_schema to 'test_json_insert'; -- test for basic functionality of json_replace select JSON_INSERT('{"a": 43}', '$.b', 55); @@ -75,5 +74,5 @@ insert into test values (JSON_INSERT('{"a": 43, "b": {"c": true}}', '$.b[4]', 'Test')); select * from test; -\c postgres -drop database if exists test_json_insert; \ No newline at end of file +drop schema test_json_insert cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_keys.sql b/contrib/dolphin/sql/json_keys.sql index c9e41353a..9f04e729d 100644 --- a/contrib/dolphin/sql/json_keys.sql +++ b/contrib/dolphin/sql/json_keys.sql @@ -1,6 +1,5 @@ -drop database if exists 
test_json_keys; -create database test_json_keys dbcompatibility'B'; -\c test_json_keys +create schema test_json_keys; +set current_schema to 'test_json_keys'; SELECT JSON_KEYS('{"a":"t1"}'); SELECT JSON_KEYS('{"a":"t1","b":"t2"}'); @@ -31,5 +30,5 @@ create table student(name json); insert into student (name) value(json_keys('{"a":123,"b":{"c":"qwe"}}')); select name from student; -\c postgres -drop database if exists test_json_keys; \ No newline at end of file +drop schema test_json_keys cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_length.sql b/contrib/dolphin/sql/json_length.sql index 58fbccf70..238e655dc 100644 --- a/contrib/dolphin/sql/json_length.sql +++ b/contrib/dolphin/sql/json_length.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_length; -create database test_json_length dbcompatibility 'B'; -\c test_json_length +create schema test_json_length; +set current_schema to 'test_json_length'; select json_length(NULL); select json_length('NULL'); @@ -52,5 +51,5 @@ insert into test values select * from test; drop table test; -\c postgres; -drop database if exists test_json_length; +drop schema test_json_length cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_merge_patch.sql b/contrib/dolphin/sql/json_merge_patch.sql index 63315a582..24b3778e6 100644 --- a/contrib/dolphin/sql/json_merge_patch.sql +++ b/contrib/dolphin/sql/json_merge_patch.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_merge_patch; -create database test_json_merge_patch dbcompatibility 'B'; -\c test_json_merge_patch +create schema test_json_merge_patch; +set current_schema to 'test_json_merge_patch'; select json_merge_patch(NULL); select json_merge_patch(NULL,NULL); @@ -75,5 +74,5 @@ insert into test1 values json_merge_patch('[{"a":"abc"},"bcd"]','{"1":"jks"}')); select * from test1; -\c postgres; -drop database if exists test_json_merge_patch; \ No newline at end of file +drop schema test_json_merge_patch 
cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_merge_preserve.sql b/contrib/dolphin/sql/json_merge_preserve.sql index b97eb59e6..6ca736134 100644 --- a/contrib/dolphin/sql/json_merge_preserve.sql +++ b/contrib/dolphin/sql/json_merge_preserve.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_merge_preserve; -create database test_json_merge_preserve dbcompatibility 'B'; -\c test_json_merge_preserve +create schema test_json_merge_preserve; +set current_schema to 'test_json_merge_preserve'; select json_merge_preserve(NULL); select json_merge_preserve(NULL,NULL); @@ -75,5 +74,5 @@ insert into test1 values json_merge_preserve('[{"a":"abc"},"bcd"]','{"1":"jks"}')); select * from test1; -\c postgres; -drop database if exists test_json_merge_preserve; \ No newline at end of file +drop schema test_json_merge_preserve cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_object.sql b/contrib/dolphin/sql/json_object.sql index 879721389..a61e73efa 100644 --- a/contrib/dolphin/sql/json_object.sql +++ b/contrib/dolphin/sql/json_object.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_object; -create database test_json_object dbcompatibility 'B'; -\c test_json_object +create schema test_json_object; +set current_schema to 'test_json_object'; -- test for b_compatibility_mode = false select json_object('{a,1,b,2,3,NULL,"d e f","a b c"}'); @@ -111,5 +110,5 @@ set dolphin.b_compatibility_mode = 0; select json_object('{a,1,b,2,3,NULL,"d e f","a b c"}'); select json_object('{a,b,"a b c"}', '{a,1,1}'); -\c postgres -drop database if exists test_json_object; \ No newline at end of file +drop schema test_json_object cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_objectagg.sql b/contrib/dolphin/sql/json_objectagg.sql index 1fdf43904..8751bc3c0 100644 --- a/contrib/dolphin/sql/json_objectagg.sql +++ 
b/contrib/dolphin/sql/json_objectagg.sql @@ -1,6 +1,5 @@ -drop database if exists json_objectagg_test; -create database json_objectagg_test dbcompatibility 'B'; -\c json_objectagg_test +create schema json_objectagg_test; +set current_schema to 'json_objectagg_test'; -- create table for test create table City(District varchar(30), Name varchar(30), Population int); insert into City values ('Capital Region','Canberra',322723); @@ -42,5 +41,5 @@ insert into time_table values(20211001, 2); insert into time_table values(20221204, 3); select json_objectagg(b, a) from time_table; -\c postgres -drop database json_objectagg_test; +drop schema json_objectagg_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_operator.sql b/contrib/dolphin/sql/json_operator.sql index 6966dc3f0..66a688bf1 100644 --- a/contrib/dolphin/sql/json_operator.sql +++ b/contrib/dolphin/sql/json_operator.sql @@ -1,6 +1,5 @@ -drop database if exists test_operator; -create database test_operator dbcompatibility 'B'; -\c test_operator +create schema test_operator; +set current_schema to 'test_operator'; drop table if exists test1; create table test1(data json); @@ -30,6 +29,6 @@ select data->>'b' from test2; select data->>'c' from test2; -\c postgres -drop database test_operator; +drop schema test_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_pretty.sql b/contrib/dolphin/sql/json_pretty.sql index b8b9d61d4..e5e81a919 100644 --- a/contrib/dolphin/sql/json_pretty.sql +++ b/contrib/dolphin/sql/json_pretty.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_pretty; -create database test_json_pretty dbcompatibility'B'; -\c test_json_pretty +create schema test_json_pretty; +set current_schema to 'test_json_pretty'; -- test for basic functionality of json_replace select JSON_PRETTY('{"a": 43}'); @@ -65,5 +64,5 @@ select JSON_PRETTY(textjson) from test; insert into test values (JSON_PRETTY('{"a": 43, "b": {"c": true}}')); select * from test; -\c postgres 
-drop database test_json_pretty; +drop schema test_json_pretty cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_quote.sql b/contrib/dolphin/sql/json_quote.sql index efe23878d..f6dc1bb44 100644 --- a/contrib/dolphin/sql/json_quote.sql +++ b/contrib/dolphin/sql/json_quote.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_quote; -create database test_json_quote dbcompatibility'B'; -\c test_json_quote +create schema test_json_quote; +set current_schema to 'test_json_quote'; select json_quote(E'a\tb'); select json_quote('a b'); @@ -21,5 +20,5 @@ insert into student (name) value(json_quote('lc')); select name from student; drop table student; -\c postgres -drop database test_json_quote; +drop schema test_json_quote cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_remove.sql b/contrib/dolphin/sql/json_remove.sql index 27112bcc0..ab9edf823 100644 --- a/contrib/dolphin/sql/json_remove.sql +++ b/contrib/dolphin/sql/json_remove.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_remove; -create database test_json_remove dbcompatibility'B'; -\c test_json_remove +create schema test_json_remove; +set current_schema to 'test_json_remove'; SELECT JSON_REMOVE('[0, 1, 2, [3, 4]]', '$[0]', '$[2]'); SELECT JSON_REMOVE('{"x": 1, "y": 2}', '$.x'); @@ -30,5 +29,5 @@ insert into info1 values ('{"x": {"z":2,"a":3}, "y": 2}', '$.x.z'); insert into tab_json1 SELECT JSON_REMOVE(name,address) from info1; select * from tab_json1; -\c postgres -drop database if exists test_json_remove; \ No newline at end of file +drop schema test_json_remove cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_replace.sql b/contrib/dolphin/sql/json_replace.sql index 407335ef5..690d8cd57 100644 --- a/contrib/dolphin/sql/json_replace.sql +++ b/contrib/dolphin/sql/json_replace.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_replace; -create database test_json_replace dbcompatibility 'B'; -\c test_json_replace 
+create schema test_json_replace; +set current_schema to 'test_json_replace'; -- test for basic functionality of json_replace SELECT JSON_REPLACE('{"a": 1, "b": 2, "c": 3}', '$.b', 9); @@ -70,5 +69,5 @@ SELECT JSON_REPLACE('x','a',3,true); -- test for invalid json document SELECT JSON_REPLACE('x',2,2); -\c postgres -drop database if exists test_json_replace; \ No newline at end of file +drop schema test_json_replace cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_search.sql b/contrib/dolphin/sql/json_search.sql index 7224fc6cf..d40e96c4e 100644 --- a/contrib/dolphin/sql/json_search.sql +++ b/contrib/dolphin/sql/json_search.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_search; -create database test_json_search dbcompatibility'B'; -\c test_json_search +create schema test_json_search; +set current_schema to 'test_json_search'; select json_search('null','one','null','&','$'); select json_search(null,'one','null','&','$'); @@ -191,5 +190,5 @@ insert into json_search_test values select * from json_search_test; drop table json_search_test; -\c postgres; -drop database if exists test_json_search; \ No newline at end of file +drop schema test_json_search cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_set.sql b/contrib/dolphin/sql/json_set.sql index 11e3278e7..3af1ce7c1 100644 --- a/contrib/dolphin/sql/json_set.sql +++ b/contrib/dolphin/sql/json_set.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_set; -create database test_json_set dbcompatibility 'B'; -\c test_json_set +create schema test_json_set; +set current_schema to 'test_json_set'; select json_set('{"1":2}','$."1"',6); select json_set('{"1":2,"b":"r","q":0}','$.b',6); @@ -27,5 +26,5 @@ insert into dataa (name) value(json_set('{"s":1}','$.s',3,'$.w',5)); select name from dataa; drop table dataa; -\c postgres -drop database if exists test_json_set; \ No newline at end of file +drop schema test_json_set 
cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_storage_size.sql b/contrib/dolphin/sql/json_storage_size.sql index 70dca43df..3ba1621e3 100644 --- a/contrib/dolphin/sql/json_storage_size.sql +++ b/contrib/dolphin/sql/json_storage_size.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_storage_size; -create database test_json_storage_size dbcompatibility'B'; -\c test_json_storage_size +create schema test_json_storage_size; +set current_schema to 'test_json_storage_size'; set enable_set_variable_b_format to on; @@ -72,6 +71,5 @@ FROM SELECT JSON_STORAGE_SIZE('{0,1}'); -\c postgres - -drop database test_json_storage_size +drop schema test_json_storage_size cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_type.sql b/contrib/dolphin/sql/json_type.sql index cafe66d93..0257bcc7d 100644 --- a/contrib/dolphin/sql/json_type.sql +++ b/contrib/dolphin/sql/json_type.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_type; -create database test_json_type dbcompatibility'B'; -\c test_json_type +create schema test_json_type; +set current_schema to 'test_json_type'; ---string @@ -59,5 +58,5 @@ insert into test_type values('{"a":1}',3,null,null); select json_type(t1) from test_type; -\c postgres -drop database test_json_type; +drop schema test_json_type cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/json_unquote.sql b/contrib/dolphin/sql/json_unquote.sql index f43ec9f5b..3a41cac62 100644 --- a/contrib/dolphin/sql/json_unquote.sql +++ b/contrib/dolphin/sql/json_unquote.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_unquote; -create database test_json_unquote dbcompatibility 'B'; -\c test_json_unquote +create schema test_json_unquote; +set current_schema to 'test_json_unquote'; select json_unquote('"abc"'); select json_unquote('abc'); @@ -32,5 +31,5 @@ insert into data (name) value(json_unquote('"sjy"')); select name from data; drop table data; -\c postgres -drop database if exists 
test_json_unquote; \ No newline at end of file +drop schema test_json_unquote cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/json_valid.sql b/contrib/dolphin/sql/json_valid.sql index 34ef52749..a7147bd72 100644 --- a/contrib/dolphin/sql/json_valid.sql +++ b/contrib/dolphin/sql/json_valid.sql @@ -1,6 +1,5 @@ -drop database if exists test_json_valid; -create database test_json_valid dbcompatibility 'b'; -\c test_json_valid +create schema test_json_valid; +set current_schema to 'test_json_valid'; select json_valid(NULL); @@ -221,7 +220,7 @@ select target, json_valid(target) from json_valid_test; drop table json_valid_test; -\c postgres -drop database if exists test_json_valid; +drop schema test_json_valid cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql index da92db474..2f6527864 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_invalid_input.sql @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_invalid_input_test dbcompatibility 'B'; -\c sql_ignore_invalid_input_test; +create schema sql_ignore_invalid_input_test; +set current_schema to 'sql_ignore_invalid_input_test'; set timezone to 'PRC'; -- type: tinyint @@ -310,5 +310,5 @@ select * from t_bit; -- restore context reset timezone; show timezone; -\c postgres -drop database if exists sql_ignore_invalid_input_test; \ No newline at end of file +drop schema sql_ignore_invalid_input_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql index 349b9b6ee..9ea1455ac 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_no_matched_partition.sql @@ -1,6 +1,6 @@ -- test for ignore error of no partition matched -create database sql_ignore_no_matched_partition_test dbcompatibility 'B'; -\c sql_ignore_no_matched_partition_test; +create schema sql_ignore_no_matched_partition_test; +set current_schema to 'sql_ignore_no_matched_partition_test'; -- sqlbypass set enable_opfusion = on; @@ -157,5 +157,5 @@ set enable_opfusion = on; set enable_partition_opfusion = off; drop table t_ignore; drop table t_from; -\c postgres -drop database if exists sql_ignore_no_matched_partition_test; \ No newline at end of file +drop schema sql_ignore_no_matched_partition_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql index d54751bb4..35938ba7b 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_not_null_constraints.sql @@ -1,6 +1,6 @@ -- test for insert/update ignore. 
-create database sql_ignore_not_null_test dbcompatibility 'B'; -\c sql_ignore_not_null_test; +create schema sql_ignore_not_null_test; +set current_schema to 'sql_ignore_not_null_test'; drop table if exists t_ignore; create table t_ignore(col1 int, col2 int not null, col3 varchar not null); @@ -547,5 +547,5 @@ update ignore t_ignore set num = null where num = 1; select * from t_ignore; -- restore context -\c postgres -drop database if exists sql_ignore_not_null_test; \ No newline at end of file +drop schema sql_ignore_not_null_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql index 7556cbe1d..c931e9c35 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_type_transform.sql @@ -1,5 +1,5 @@ -create database sql_ignore_type_transform_test dbcompatibility 'B'; -\c sql_ignore_type_transform_test; +create schema sql_ignore_type_transform_test; +set current_schema to 'sql_ignore_type_transform_test'; -- test for tinyint drop table if exists t; @@ -343,5 +343,5 @@ insert into t_text values(123456789123456789); insert ignore into t_nvarchar2 select cont from t_text; select * from t_nvarchar2; -\c postgres -drop database if exists sql_ignore_type_transform_test; \ No newline at end of file +drop schema sql_ignore_type_transform_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql b/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql index 08a6f5cc1..fd4e0e2bf 100644 --- a/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql +++ b/contrib/dolphin/sql/keyword_ignore_test/ignore_unique_constraints.sql @@ -1,5 +1,5 @@ -create database sql_ignore_unique_test dbcompatibility 'B'; -\c sql_ignore_unique_test; +create 
schema sql_ignore_unique_test; +set current_schema to 'sql_ignore_unique_test'; drop table if exists t_ignore; create table t_ignore(col1 int, col2 int unique, col3 int unique); @@ -177,5 +177,5 @@ insert into t_ignore values(2); update ignore t_ignore set num = 1 where num = 2; select * from t_ignore; -\c postgres -drop database if exists sql_ignore_unique_test; \ No newline at end of file +drop schema sql_ignore_unique_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/kill.sql b/contrib/dolphin/sql/kill.sql index d3dc3ff4d..e60ea0581 100644 --- a/contrib/dolphin/sql/kill.sql +++ b/contrib/dolphin/sql/kill.sql @@ -1,8 +1,7 @@ -drop database if exists test_kill; -create database test_kill dbcompatibility 'b'; -\c test_kill +create schema test_kill; +set current_schema to 'test_kill'; kill query (select sessionid from pg_stat_activity where application_name = 'JobScheduler'); kill connection (select sessionid from pg_stat_activity where application_name = 'JobScheduler'); kill (select sessionid from pg_stat_activity where application_name = 'PercentileJob'); -\c postgres -drop database if exists test_kill; \ No newline at end of file +drop schema test_kill cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/like_default_test.sql b/contrib/dolphin/sql/like_default_test.sql index d9e093227..586a61909 100644 --- a/contrib/dolphin/sql/like_default_test.sql +++ b/contrib/dolphin/sql/like_default_test.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists like_default_test; --- create database like_default_test dbcompatibility 'b'; -create database like_default_test with DBCOMPATIBILITY = 'B'; -\c like_default_test +create schema like_default_test; +set current_schema to 'like_default_test'; create table test_nv (name national varchar(10)); \d test_nv @@ -395,11 +392,5 @@ insert into test_insert(c1, c2, c3, c4) values(8, null, 'e', null); select * from test_insert; -\c 
postgres -drop database if exists like_default_test; - - - - - - +drop schema like_default_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/mysqlmode_fullgroup.sql b/contrib/dolphin/sql/mysqlmode_fullgroup.sql index 155df09bd..274e7c968 100644 --- a/contrib/dolphin/sql/mysqlmode_fullgroup.sql +++ b/contrib/dolphin/sql/mysqlmode_fullgroup.sql @@ -1,5 +1,5 @@ -CREATE DATABASE sql_mode_full_group dbcompatibility 'B'; -\c sql_mode_full_group; +create schema sql_mode_full_group; +set current_schema to 'sql_mode_full_group'; create table test_group(a int, b int, c int, d int); create table test_group1(a int, b int, c int, d int); insert into test_group values(1,2,3,4); @@ -18,5 +18,5 @@ set dolphin.sql_mode = ''; select a, b from test_group group by a; select a, d as items, (select count(*) from test_group t where t.a = i.a and b in (select b from test_group1 where c = 4)) as third from test_group i group by a; select t.a, (select sum(b) from test_group i where i.b = t.b ) from test_group t where t.a > 1+1 or (t.b < 8 and t.b > 1) group by t.a; -\c contrib_regression; -drop DATABASE if exists sql_mode_full_group; \ No newline at end of file +drop schema sql_mode_full_group cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/mysqlmode_strict.sql b/contrib/dolphin/sql/mysqlmode_strict.sql index 57a1e2b48..225ca18a7 100644 --- a/contrib/dolphin/sql/mysqlmode_strict.sql +++ b/contrib/dolphin/sql/mysqlmode_strict.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict; - -CREATE DATABASE sql_mode_strict dbcompatibility 'B'; -\c sql_mode_strict; +create schema sql_mode_strict; +set current_schema to 'sql_mode_strict'; set dolphin.sql_mode = ''; create table test_tint(a tinyint); @@ -748,5 +746,5 @@ insert into test_notnull_numeric_strict(b) values(null); -\c contrib_regression; -drop DATABASE if exists sql_mode_strict; +drop schema sql_mode_strict cascade; +reset current_schema; diff --git 
a/contrib/dolphin/sql/mysqlmode_strict2.sql b/contrib/dolphin/sql/mysqlmode_strict2.sql index f60aaa2ec..66c3b3f15 100644 --- a/contrib/dolphin/sql/mysqlmode_strict2.sql +++ b/contrib/dolphin/sql/mysqlmode_strict2.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists sql_mode_strict2; - -CREATE DATABASE sql_mode_strict2 dbcompatibility 'B'; -\c sql_mode_strict2; +create schema sql_mode_strict2; +set current_schema to 'sql_mode_strict2'; set dolphin.sql_mode = ''; create table test_tint(a tinyint unsigned); @@ -748,5 +746,5 @@ insert into test_notnull_numeric_strict(b) values(null); -\c contrib_regression; -drop DATABASE if exists sql_mode_strict2; +drop schema sql_mode_strict2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/network.sql b/contrib/dolphin/sql/network.sql index 8502c72f1..2d84e3d8c 100644 --- a/contrib/dolphin/sql/network.sql +++ b/contrib/dolphin/sql/network.sql @@ -1,6 +1,5 @@ -drop database if exists test_network; -create database test_network dbcompatibility 'b'; -\c test_network +create schema test_network; +set current_schema to 'test_network'; create table test (ip1 varchar(20),ip2 char(20),ip3 nvarchar2(20),ip4 text,ip5 clob); insert into test (ip1,ip2,ip3,ip4,ip5) values ('192.168.1.1','127.0.0.1','10.0.0.10','172.0.0.1','0.0.0.0'),('fe80::1','a::f','a::c','a::d','a::e'),('192.168.1.256','192.168.1','256.168.1.1','192.256.1.1','192.168.1.-1'); select is_ipv4(ip1),is_ipv4(ip2),is_ipv4(ip3),is_ipv4(ip4),is_ipv4(ip5) from test; @@ -25,5 +24,5 @@ select is_ipv6('::3'); select is_ipv6(10); select is_ipv6(3.5); select is_ipv6(NULL); -\c postgres -drop database if exists test_network; \ No newline at end of file +drop schema test_network cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/network2.sql b/contrib/dolphin/sql/network2.sql index ce8d6e84e..50d6bcb07 100644 --- a/contrib/dolphin/sql/network2.sql +++ b/contrib/dolphin/sql/network2.sql @@ -1,6 +1,5 @@ -drop database if exists network2; 
-create database network2 dbcompatibility 'b'; -\c network2 +create schema network2; +set current_schema to 'network2'; set dolphin.sql_mode = ''; select inet_ntoa(inet_aton('255.255.255.255.255.255.255.255')); select inet_ntoa(1099511627775),inet_ntoa(4294902271),inet_ntoa(4294967295),inet_ntoa(511); @@ -144,5 +143,5 @@ SELECT IS_IPV4_MAPPED(INET6_ATON('::1')),IS_IPV4_COMPAT(INET6_ATON('::1')); SELECT IS_IPV4_MAPPED(INET6_ATON('::')),IS_IPV4_COMPAT(INET6_ATON('::')); SELECT IS_IPV4_MAPPED(NULL),IS_IPV4_COMPAT(NULL); reset dolphin.sql_mode; -\c postgres -drop database if exists network2; \ No newline at end of file +drop schema network2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/none_strict_warning.sql b/contrib/dolphin/sql/none_strict_warning.sql index 2c46b20bd..68b893674 100644 --- a/contrib/dolphin/sql/none_strict_warning.sql +++ b/contrib/dolphin/sql/none_strict_warning.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists none_strict_warning_test; -create database none_strict_warning_test dbcompatibility 'b'; -\c none_strict_warning_test +create schema none_strict_warning_test; +set current_schema to 'none_strict_warning_test'; reset dolphin.sql_mode; create table test_int1(c1 int1); @@ -1101,6 +1099,5 @@ select * from test_uint1; select * from test_uint2; select * from test_uint4; select * from test_uint8; ----- drop database -\c contrib_regression -DROP DATABASE none_strict_warning_test; \ No newline at end of file +drop schema none_strict_warning_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/nvarchar.sql b/contrib/dolphin/sql/nvarchar.sql index 4ce4a1e19..588673b7b 100644 --- a/contrib/dolphin/sql/nvarchar.sql +++ b/contrib/dolphin/sql/nvarchar.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists db_nvarchar; -create database db_nvarchar dbcompatibility 'b'; - -\c db_nvarchar +create schema db_nvarchar; +set 
current_schema to 'db_nvarchar'; -- -- VARCHAR -- @@ -69,5 +66,5 @@ INSERT INTO NVARCHAR_TBL (f1) VALUES ('abcde'); INSERT INTO NVARCHAR_TBL (f1) VALUES ('abcd '); SELECT '' AS four, * FROM NVARCHAR_TBL; -\c postgres -drop database if exists db_nvarchar; \ No newline at end of file +drop schema db_nvarchar cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/oct.sql b/contrib/dolphin/sql/oct.sql index 9f00f22d1..454b57c27 100644 --- a/contrib/dolphin/sql/oct.sql +++ b/contrib/dolphin/sql/oct.sql @@ -1,6 +1,5 @@ -drop database if exists db_oct; -create database db_oct dbcompatibility 'b'; -\c db_oct +create schema db_oct; +set current_schema to 'db_oct'; -- 测试正常数字十进制转八进制 SELECT OCT(10); @@ -35,5 +34,5 @@ insert into test_oct values('10'),('11'); select oct(name) from test_oct; drop table if exists test_oct; -\c postgres -drop database if exists db_oct; \ No newline at end of file +drop schema db_oct cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/option.sql b/contrib/dolphin/sql/option.sql index c90c9e559..9c507b910 100644 --- a/contrib/dolphin/sql/option.sql +++ b/contrib/dolphin/sql/option.sql @@ -1,6 +1,5 @@ -drop database if exists option; -create database option dbcompatibility = 'b'; -\c option +create schema option; +set current_schema to 'option'; create global temp table test1(a int primary key, b text) on commit delete rows engine = InnoDB with(STORAGE_TYPE = ASTORE); @@ -141,6 +140,5 @@ drop table test8; drop table test9; drop table test10; -\c postgres - -drop database option; +drop schema option cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_maxvalue_test.sql b/contrib/dolphin/sql/partition_maxvalue_test.sql index 68180e380..96dfe89d1 100644 --- a/contrib/dolphin/sql/partition_maxvalue_test.sql +++ b/contrib/dolphin/sql/partition_maxvalue_test.sql @@ -1,6 +1,5 @@ -drop DATABASE if exists partition_maxvalue_test; -CREATE DATABASE 
partition_maxvalue_test dbcompatibility 'B'; -\c partition_maxvalue_test; +create schema partition_maxvalue_test; +set current_schema to 'partition_maxvalue_test'; --test MAXVALUE syntax CREATE TABLE IF NOT EXISTS testsubpart ( @@ -60,5 +59,5 @@ CREATE TABLE testpart3 (a int) DISTRIBUTE BY RANGE(a) ); create table testpart4(a int) DISTRIBUTE by range(a) (SLICE p0 start (1) end MAXVALUE); create table testpart5(a int) DISTRIBUTE by range(a) (SLICE p0 start MAXVALUE end (200), SLICE p1 end(300)); -\c postgres; -drop DATABASE if exists partition_maxvalue_test; +drop schema partition_maxvalue_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_test1.sql b/contrib/dolphin/sql/partition_test1.sql index 2ea524a70..fe373c463 100644 --- a/contrib/dolphin/sql/partition_test1.sql +++ b/contrib/dolphin/sql/partition_test1.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test1; - -CREATE DATABASE partition_test1 dbcompatibility 'B'; -\c partition_test1; +create schema partition_test1; +set current_schema to 'partition_test1'; -------test range partition tables ----test partition table @@ -143,13 +141,13 @@ create index idx_b on test_part_list using btree(b) local; alter table test_part_list add constraint uidx_d unique(d); alter table test_part_list add constraint uidx_c unique using index idx_c; insert into test_part_list values(2000,1,2,3),(3000,2,3,4),(4000,3,4,5),(5000,4,5,6); -select * from test_part_list; +select * from test_part_list order by a desc; select relname, parttype from pg_partition where (parentid in (select oid from pg_class where relname = 'test_part_list')) and oid != relfilenode order by relname; ALTER TABLE test_part_list REBUILD PARTITION p1, p2; -select * from test_part_list; +select * from test_part_list order by a desc; select relname, parttype from pg_partition where (parentid in (select oid from pg_class where relname = 'test_part_list')) and oid != relfilenode order by relname; ALTER TABLE test_part_list REBUILD 
PARTITION all; -select * from test_part_list; +select * from test_part_list order by a desc; select relname, parttype from pg_partition where (parentid in (select oid from pg_class where relname = 'test_part_list')) and oid != relfilenode order by relname; @@ -323,5 +321,5 @@ select * from test_part_segment where ((980 < d and d < 1000) or (2180 < d and d select * from test_part_segment where ((980 < b and b < 1000) or (2180 < b and b < 2200)); --test remove partitioning alter table test_part_segment remove partitioning; -\c postgres; -drop DATABASE if exists partition_test1; +drop schema partition_test1 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_test2.sql b/contrib/dolphin/sql/partition_test2.sql index 830eb1927..752d8946e 100644 --- a/contrib/dolphin/sql/partition_test2.sql +++ b/contrib/dolphin/sql/partition_test2.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test2; - -CREATE DATABASE partition_test2 dbcompatibility 'B'; -\c partition_test2; +create schema partition_test2; +set current_schema to 'partition_test2'; CREATE TABLE IF NOT EXISTS test_part1 ( a int, @@ -206,5 +204,5 @@ select * from test_part_hash; select * from test_no_part1; alter table test_part_hash analyze partition p0,p1; alter table test_part_hash analyze partition all; -\c postgres; -drop DATABASE if exists partition_test2; +drop schema partition_test2 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/partition_test3.sql b/contrib/dolphin/sql/partition_test3.sql index 4283258fd..517f8aa2d 100644 --- a/contrib/dolphin/sql/partition_test3.sql +++ b/contrib/dolphin/sql/partition_test3.sql @@ -1,7 +1,5 @@ -drop DATABASE if exists partition_test3; - -CREATE DATABASE partition_test3 dbcompatibility 'B'; -\c partition_test3; +create schema partition_test3; +set current_schema to 'partition_test3'; --test add and drop CREATE TABLE IF NOT EXISTS test_part2 ( @@ -108,5 +106,5 @@ PARTITION BY RANGE(a) SUBPARTITION BY RANGE(b) ALTER TABLE 
test_part2_1 add PARTITION p1 VALUES LESS THAN (200) (SUBPARTITION p1_0 VALUES LESS THAN (100)); ALTER TABLE test_part2_1 add PARTITION p2 VALUES (add(600,100)) (SUBPARTITION p2_0 VALUES LESS THAN (100)); ALTER TABLE test_part2_1 add PARTITION p3 VALUES (DEFAULT) (SUBPARTITION p3_0 VALUES LESS THAN (100)); -\c postgres; -drop DATABASE if exists partition_test3; +drop schema partition_test3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/partition_test4.sql b/contrib/dolphin/sql/partition_test4.sql index 4f1ef2836..e1d9c7e63 100644 --- a/contrib/dolphin/sql/partition_test4.sql +++ b/contrib/dolphin/sql/partition_test4.sql @@ -1,6 +1,5 @@ -drop DATABASE if exists partition_test4; -CREATE DATABASE partition_test4 dbcompatibility 'B'; -\c partition_test4; +create schema partition_test4; +set current_schema to 'partition_test4'; CREATE TABLE test_range_subpart ( a INT4 PRIMARY KEY, @@ -275,5 +274,5 @@ partition p1 values less than(200), partition p2 values less than(300), partition p3 values less than (maxvalue) ); -\c postgres; -drop DATABASE if exists partition_test4; \ No newline at end of file +drop schema partition_test4 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/pl_debugger_client.sql b/contrib/dolphin/sql/pl_debugger_client.sql index 7ec6e79b5..da53cfbfa 100644 --- a/contrib/dolphin/sql/pl_debugger_client.sql +++ b/contrib/dolphin/sql/pl_debugger_client.sql @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- wait for server establishment select pg_sleep(3); diff --git a/contrib/dolphin/sql/pl_debugger_server.sql b/contrib/dolphin/sql/pl_debugger_server.sql index b17229c52..dfc62cf74 100644 --- a/contrib/dolphin/sql/pl_debugger_server.sql +++ b/contrib/dolphin/sql/pl_debugger_server.sql @@ -1,4 +1,4 @@ -\c test_ansi_quotes +set current_schema to 
'test_ansi_quotes'; SET dolphin.sql_mode TO 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -- setups drop schema if exists pl_debugger cascade; diff --git a/contrib/dolphin/sql/read_only_guc_test.sql b/contrib/dolphin/sql/read_only_guc_test.sql index 2b058cca6..c0b563f5b 100755 --- a/contrib/dolphin/sql/read_only_guc_test.sql +++ b/contrib/dolphin/sql/read_only_guc_test.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists read_only_guc_test; --- create database read_only_guc_test dbcompatibility 'b'; -create database read_only_guc_test with DBCOMPATIBILITY = 'B'; -\c read_only_guc_test +create schema read_only_guc_test; +set current_schema to 'read_only_guc_test'; show version_comment; show auto_increment_increment; @@ -44,5 +41,5 @@ SELECT * FROM pg_settings WHERE NAME='system_time_zone'; SELECT * FROM pg_settings WHERE NAME='time_zone'; SELECT * FROM pg_settings WHERE NAME='wait_timeout'; -\c postgres -drop database if exists read_only_guc_test; +drop schema read_only_guc_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/regexp.sql b/contrib/dolphin/sql/regexp.sql index ee9895a8d..fad6113c6 100644 --- a/contrib/dolphin/sql/regexp.sql +++ b/contrib/dolphin/sql/regexp.sql @@ -1,7 +1,6 @@ -drop database if exists db_regexp; -create database db_regexp dbcompatibility 'b'; -\c db_regexp +create schema db_regexp; +set current_schema to 'db_regexp'; select regexp('a', true); -\c postgres -drop database if exists db_regexp; \ No newline at end of file +drop schema db_regexp cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/replace_test/replace.sql b/contrib/dolphin/sql/replace_test/replace.sql index 32846fd70..88b3baa31 100755 --- a/contrib/dolphin/sql/replace_test/replace.sql +++ b/contrib/dolphin/sql/replace_test/replace.sql @@ -1,6 +1,5 @@ -drop database if exists db_replace; -create database db_replace dbcompatibility 'B'; -\c db_replace; +create schema db_replace; +set 
current_schema to 'db_replace'; create table t1 (a int); create table t2 (a int); @@ -56,5 +55,5 @@ replace DELAYED into Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) values(4); replace DELAYED into Parts partition(p1) select A from T2 where A >=2 ; -\c postgres -drop database db_replace; +drop schema db_replace cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/second_microsecond.sql b/contrib/dolphin/sql/second_microsecond.sql index 5b1d3fbf7..12ba05354 100644 --- a/contrib/dolphin/sql/second_microsecond.sql +++ b/contrib/dolphin/sql/second_microsecond.sql @@ -1,5 +1,5 @@ -create database second_microsecond dbcompatibility = 'b'; -\c second_microsecond +create schema second_microsecond; +set current_schema to 'second_microsecond'; select microsecond(timestamp '2021-11-4 16:30:44.3411'); select second(timestamp(6) '2021-11-4 16:30:44.3411'); select microsecond(datetime '2021-11-4 16:30:44.3411'); @@ -11,5 +11,5 @@ select second(time(6) '2021-11-4 16:30:44.3411'); select microsecond(timetz '2021-11-4 16:30:44.3411'); select second(timetz(6) '2021-11-4 16:30:44.3411'); -\c postgres -drop database second_microsecond; \ No newline at end of file +drop schema second_microsecond cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/set_password.sql b/contrib/dolphin/sql/set_password.sql index 38710186f..d8c931b6c 100644 --- a/contrib/dolphin/sql/set_password.sql +++ b/contrib/dolphin/sql/set_password.sql @@ -1,6 +1,5 @@ -drop database if exists test_set_password; -create database test_set_password dbcompatibility 'b'; -\c test_set_password +create schema test_set_password; +set current_schema to 'test_set_password'; set password = 'abc@1234'; set password for current_user() = 'abc@2345'; create user user1 password 'abc@1234'; @@ -10,6 +9,7 @@ set password for 'user1'@'%' = 'abc@3456' replace 'abc@2345'; set session authorization user1 password 'abc@3456'; set password for 'user1'@'%' = 
PASSWORD('abc@4567') replace 'abc@3456'; set session authorization user1 password 'abc@4567'; -\c postgres -drop database if exists test_set_password; -drop user user1; \ No newline at end of file +\c contrib_regression +drop user user1; +drop schema test_set_password cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show.sql b/contrib/dolphin/sql/show.sql index 7b9eb922b..f2c29b110 100644 --- a/contrib/dolphin/sql/show.sql +++ b/contrib/dolphin/sql/show.sql @@ -1,5 +1,5 @@ -create database show_test dbcompatibility 'b'; -\c show_test +create schema show_test; +set current_schema to 'show_test'; create user grant_test identified by 'H&*#^DH85@#(J'; set search_path = 'grant_test'; create table test(id int); @@ -75,5 +75,5 @@ SHOW COLLATION WHERE charset = 'win1251'; reset search_path; drop user grant_test cascade; -\c postgres -drop database show_test; \ No newline at end of file +drop schema show_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show_create.sql b/contrib/dolphin/sql/show_create.sql index c3380cb53..93ce14e0f 100644 --- a/contrib/dolphin/sql/show_create.sql +++ b/contrib/dolphin/sql/show_create.sql @@ -1,6 +1,5 @@ -drop database if exists show_create; -create database show_create dbcompatibility 'b'; -\c show_create +create schema show_create; +set current_schema to 'show_create'; CREATE USER test_showcreate WITH PASSWORD 'openGauss@123'; GRANT ALL PRIVILEGES TO test_showcreate; SET ROLE test_showcreate PASSWORD 'openGauss@123'; @@ -462,5 +461,5 @@ reset current_schema; drop schema test_get_def cascade; RESET ROLE; DROP USER test_showcreate; -\c postgres -drop database if exists show_create; \ No newline at end of file +drop schema show_create cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show_create_database.sql b/contrib/dolphin/sql/show_create_database.sql index e5a58f639..1f4cb1044 100644 --- 
a/contrib/dolphin/sql/show_create_database.sql +++ b/contrib/dolphin/sql/show_create_database.sql @@ -1,6 +1,5 @@ -drop database if exists show_createdatabase; -create database show_createdatabase dbcompatibility 'b'; -\c show_createdatabase +create schema show_createdatabase; +set current_schema to 'show_createdatabase'; CREATE USER test_showcreate_database WITH PASSWORD 'openGauss@123'; GRANT ALL PRIVILEGES TO test_showcreate_database; SET ROLE test_showcreate_database PASSWORD 'openGauss@123'; @@ -20,5 +19,5 @@ show create database aa; drop schema test_get_database cascade; RESET ROLE; DROP USER test_showcreate_database; -\c postgres -drop database if exists show_createdatabase; \ No newline at end of file +drop schema show_createdatabase cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/show_variables.sql b/contrib/dolphin/sql/show_variables.sql index 0878c5f11..e649b53b6 100644 --- a/contrib/dolphin/sql/show_variables.sql +++ b/contrib/dolphin/sql/show_variables.sql @@ -1,6 +1,5 @@ -drop database if exists show_variables; -create database show_variables dbcompatibility 'b'; -\c show_variables +create schema show_variables; +set current_schema to 'show_variables'; SET datestyle TO postgres, dmy; show variables like 'DateSty%'; show variables where variable_name like 'DateSty%'; @@ -15,5 +14,5 @@ SET datestyle TO ISO, MDY; show session variables where variable_name = 'DateStyle'; show global variables where variable_name = 'DateStyle'; RESET datestyle; -\c postgres -drop database if exists show_variables; \ No newline at end of file +drop schema show_variables cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/signed_unsigned_cast.sql b/contrib/dolphin/sql/signed_unsigned_cast.sql index f327406eb..6fbfb5f34 100644 --- a/contrib/dolphin/sql/signed_unsigned_cast.sql +++ b/contrib/dolphin/sql/signed_unsigned_cast.sql @@ -1,9 +1,8 @@ -drop database if exists signed_unsigned_cast; 
-create database signed_unsigned_cast dbcompatibility 'b'; -\c signed_unsigned_cast +create schema signed_unsigned_cast; +set current_schema to 'signed_unsigned_cast'; select cast(1-2 as unsigned); select cast(3-5 as signed); select cast(cast(1 - 5 as signed) as unsigned); select cast(cast(1 + 5 as unsigned) as signed); -\c postgres -drop database signed_unsigned_cast; \ No newline at end of file +drop schema signed_unsigned_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/single_line_trigger.sql b/contrib/dolphin/sql/single_line_trigger.sql index aeab2588a..1e8d2a394 100644 --- a/contrib/dolphin/sql/single_line_trigger.sql +++ b/contrib/dolphin/sql/single_line_trigger.sql @@ -1,8 +1,7 @@ --create trigger -- test mysql compatibility trigger -drop database if exists db_mysql; -create database db_mysql dbcompatibility 'B'; -\c db_mysql +create schema db_mysql; +set current_schema to 'db_mysql'; create table t (id int); create table t1 (id int); create table animals (id int, name char(30)); @@ -241,5 +240,5 @@ drop procedure proc; reset enable_set_variable_b_format; -\c postgres -drop database db_mysql; +drop schema db_mysql cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql b/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql index 855ab5e54..01947121a 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_ascii_test.sql @@ -1,11 +1,18 @@ -SELECT ASCII('a'); -SELECT ASCII('你'); drop database if exists db_b_ascii_test; -create database db_b_ascii_test dbcompatibility 'B'; +create database db_b_ascii_test dbcompatibility 'A'; \c db_b_ascii_test SELECT ASCII('a'); SELECT ASCII('你'); -\c postgres +\c contrib_regression drop database db_b_ascii_test; + +create schema db_b_ascii_test; +set current_schema to 'db_b_ascii_test'; + +SELECT ASCII('a'); +SELECT ASCII('你'); + +drop schema db_b_ascii_test cascade; 
+reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql b/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql index ed9cf9395..c499b6692 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_from_base64_test.sql @@ -1,6 +1,5 @@ -drop database if exists from_base64; -create database from_base64 dbcompatibility 'b'; -\c from_base64 +create schema from_base64; +set current_schema to 'from_base64'; --测试正常base64编码作为输入,返回base64编码的解码结果 SELECT FROM_BASE64('YWJj'); @@ -24,5 +23,5 @@ CREATE TABLE test_base64 (name text); INSERT INTO test_base64 values('YWJj'), ('MTIzNDU2'), ('asjeifj'); SELECT FROM_BASE64(name) from test_base64; -\c postgres -drop database if exists from_base64; \ No newline at end of file +drop schema from_base64 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql b/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql index b040b4307..12043ad88 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_insert_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_insert_test; -create database db_b_insert_test dbcompatibility 'B'; -\c db_b_insert_test +create schema db_b_insert_test; +set current_schema to 'db_b_insert_test'; select insert('abcdefg', 2, 4, 'yyy'); select insert(1234567, 2, 4, 'yyy'); @@ -26,5 +25,5 @@ select insert('abcdefg', -4123213214212123123123123, 4, 'yyy'); select insert('abcdefg', 412321321421, 4, 'yyy'); select insert('abcdefg', -412321321421, 4, 'yyy'); -\c postgres -drop database db_b_insert_test; \ No newline at end of file +drop schema db_b_insert_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql b/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql index 
8c2c3f9f5..43aa4ed6d 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_left_right_test.sql @@ -1,11 +1,17 @@ +drop database if exists db_b_left_right_test; +create database db_b_left_right_test dbcompatibility 'A'; +\c db_b_left_right_test + SELECT left('abcdefg', 3); SELECT left('abcdefg', -3); SELECT right('abcdefg', 3); SELECT right('abcdefg', -3); -drop database if exists db_b_left_right_test; -create database db_b_left_right_test dbcompatibility 'B'; -\c db_b_left_right_test +\c contrib_regression +drop database db_b_left_right_test; + +create schema db_b_left_right_test; +set current_schema to 'db_b_left_right_test'; set bytea_output to escape; @@ -68,5 +74,5 @@ select left('abc',5/2); select right('abc',2.5); select right('abc',5/2); -\c postgres -drop database db_b_left_right_test; \ No newline at end of file +drop schema db_b_left_right_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql b/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql index 9cd5a05a6..452a1a28a 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_ord_test.sql @@ -1,6 +1,5 @@ -drop database if exists db_b_ord_test; -create database db_b_ord_test dbcompatibility 'B'; -\c db_b_ord_test +create schema db_b_ord_test; +set current_schema to 'db_b_ord_test'; -- test 1 byte select ord('1111'); @@ -32,5 +31,5 @@ insert into test_ord values('1234'), ('嬴政'), ('𓃔𓃘𓃲𓃰'); select ord(name) from test_ord; -\c postgres -drop database if exists db_b_ord_test; \ No newline at end of file +drop schema db_b_ord_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql b/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql index 1228354d4..c1a260b95 100644 --- 
a/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_quote_test.sql @@ -1,10 +1,9 @@ -drop database if exists db_b_quote_test; -create database db_b_quote_test dbcompatibility 'B'; -\c db_b_quote_test +create schema db_b_quote_test; +set current_schema to 'db_b_quote_test'; SELECT QUOTE(E'Don\'t!'); SELECT QUOTE(E'O\'hello'); SELECT QUOTE('O\hello'); -\c postgres -drop database db_b_quote_test; \ No newline at end of file +drop schema db_b_quote_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql b/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql index 78155f18d..664ea7b6f 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_string_length_test.sql @@ -1,16 +1,22 @@ +drop database if exists db_b_string_length_test; +create database db_b_string_length_test dbcompatibility 'A'; +\c db_b_string_length_test + SELECT length('jose'); SELECT length('你好呀'); SELECT LENGTH(B'101'); SELECT length('你好呀jose'); -drop database if exists db_b_string_length_test; -create database db_b_string_length_test dbcompatibility 'B'; -\c db_b_string_length_test +\c contrib_regression +drop database db_b_string_length_test; + +create schema db_b_string_length_test; +set current_schema to 'db_b_string_length_test'; SELECT length('jose'); SELECT length('你好呀'); SELECT LENGTH(B'101'); SELECT length('你好呀jose'); -\c postgres -drop database db_b_string_length_test; \ No newline at end of file +drop schema db_b_string_length_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql b/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql index ea95bef62..cec95bf2c 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql +++ 
b/contrib/dolphin/sql/string_func_test/db_b_substr_test.sql @@ -1,3 +1,7 @@ +drop database if exists db_b_substr_test; +create database db_b_substr_test dbcompatibility 'A'; +\c db_b_substr_test + DROP TABLE IF EXISTS template_string; CREATE TABLE template_string(a TEXT, b BYTEA); INSERT INTO template_string VALUES('abcdefghijklmnopqrstuvwxyz', 'abcdefghijklmnopqrstuvwxyz'); @@ -39,9 +43,11 @@ FROM template_string; DROP TABLE IF EXISTS template_string; -drop database if exists db_b_substr_test; -create database db_b_substr_test dbcompatibility 'B'; -\c db_b_substr_test +\c contrib_regression +drop database db_b_substr_test; + +create schema db_b_substr_test; +set current_schema to 'db_b_substr_test'; set bytea_output to escape; @@ -193,5 +199,5 @@ select c1, c2, substr(c1 from c2) from test_row order by c1; select c1, c2, substr(c1 for c2) from test_column order by c1; select c1, c2, substr(c1 for c2) from test_row order by c1; -\c postgres -drop database db_b_substr_test; +drop schema db_b_substr_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql b/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql index f2b0eb741..d38753f93 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_to_base64_test.sql @@ -1,6 +1,5 @@ -drop database if exists to_base64_test; -create database to_base64_test dbcompatibility 'b'; -\c to_base64_test +create schema to_base64_test; +set current_schema to 'to_base64_test'; --测试字符串作为输入,返回base64编码的编码结果 SELECT TO_BASE64('123456'); @@ -33,5 +32,5 @@ CREATE TABLE test_base64 (name text); INSERT INTO test_base64 values('123456'), ('to_base64'); SELECT TO_BASE64(name) from test_base64; -\c postgres -drop database if exists to_base64_test; +drop schema to_base64_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql 
b/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql index 79bd07ee8..99f1f16be 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_trim_test.sql @@ -1,3 +1,7 @@ +drop database if exists db_b_trim_test; +create database db_b_trim_test dbcompatibility 'A'; +\c db_b_trim_test + SELECT TRIM(' bar '); SELECT TRIM(LEADING 'x' FROM 'xxxbarxxx'); SELECT TRIM(BOTH 'x' FROM 'xxxbarxxx'); @@ -6,9 +10,11 @@ SELECT TRIM(LEADING 'xyz' FROM 'xyzxbarxxyz'); SELECT TRIM(BOTH 'xyz' FROM 'xyzxbarxxyz'); SELECT TRIM(TRAILING 'xyz' FROM 'xyzxbarxxyz'); -drop database if exists db_b_trim_test; -create database db_b_trim_test dbcompatibility 'B'; -\c db_b_trim_test +\c contrib_regression +drop database db_b_trim_test; + +create schema db_b_trim_test; +set current_schema to 'db_b_trim_test'; SELECT TRIM(' bar '); SELECT TRIM(LEADING 'x' FROM 'xxxbarxxx'); @@ -31,5 +37,5 @@ SELECT TRIM(' X '::bytea); SELECT TRIM(LEADING ' X '::bytea); SELECT TRIM(TRAILING ' X '::bytea); -\c postgres -drop database db_b_trim_test; \ No newline at end of file +drop schema db_b_trim_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql b/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql index 915b79ec1..b737bc0ed 100644 --- a/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql +++ b/contrib/dolphin/sql/string_func_test/db_b_unhex_test.sql @@ -1,6 +1,5 @@ -drop database if exists unhex_test; -create database unhex_test dbcompatibility 'b'; -\c unhex_test +create schema unhex_test; +set current_schema to 'unhex_test'; --测试字符串作为输入,返回十六进制编码的编码结果 SELECT UNHEX('6f70656e4761757373'); @@ -31,5 +30,5 @@ CREATE TABLE test_unhex (name text); INSERT INTO test_unhex values('4142'), ('6f70656e4761757373'); SELECT UNHEX(name) from test_unhex; -\c postgres -drop database if exists unhex_test; +drop schema unhex_test cascade; +reset current_schema; diff --git 
a/contrib/dolphin/sql/string_func_test/test_substring_index.sql b/contrib/dolphin/sql/string_func_test/test_substring_index.sql index 600edf274..bc67572bf 100644 --- a/contrib/dolphin/sql/string_func_test/test_substring_index.sql +++ b/contrib/dolphin/sql/string_func_test/test_substring_index.sql @@ -1,6 +1,5 @@ -drop database if exists test_substring_index; -create database test_substring_index dbcompatibility 'b'; -\c test_substring_index +create schema test_substring_index; +set current_schema to 'test_substring_index'; SELECT SUBSTRING_INDEX('www.opengauss.com','.',0); SELECT SUBSTRING_INDEX('www.opengauss.com','',2); SELECT SUBSTRING_INDEX('','.',2); @@ -44,5 +43,5 @@ SELECT SUBSTRING_INDEX(myFloat,'.',1) FROM myTable; SELECT SUBSTRING_INDEX(myBool,'1',1) FROM myTable; SELECT SUBSTRING_INDEX(myDate,'-',1) FROM myTable; drop table myTable; -\c postgres -drop database test_substring_index; \ No newline at end of file +drop schema test_substring_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_alter_table.sql b/contrib/dolphin/sql/test_alter_table.sql index 923a59c07..98838b43c 100644 --- a/contrib/dolphin/sql/test_alter_table.sql +++ b/contrib/dolphin/sql/test_alter_table.sql @@ -1,6 +1,5 @@ -drop database if exists db_alter_table; -create database db_alter_table dbcompatibility 'b'; -\c db_alter_table +create schema db_alter_table; +set current_schema to 'db_alter_table'; create table alter_table_tbl1 (a int primary key, b int); create table alter_table_tbl2 (c int primary key, d int); @@ -135,5 +134,5 @@ create table test_primary(f11 int, f12 varchar(20), f13 bool, constraint con_t_p \d+ test_primary drop table test_primary; -\c postgres -drop database if exists db_alter_table; +drop schema db_alter_table cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_binary.sql b/contrib/dolphin/sql/test_binary.sql index 376aaf5a9..e2d562046 100644 --- a/contrib/dolphin/sql/test_binary.sql +++ 
b/contrib/dolphin/sql/test_binary.sql @@ -1,6 +1,5 @@ -drop database if exists test_binary; -create database test_binary dbcompatibility 'B'; -\c test_binary +create schema test_binary; +set current_schema to 'test_binary'; create table binary_templates (a bytea, b binary(5), c varbinary(5)); -- invalid typmod @@ -83,5 +82,5 @@ select * from t_varbinary_061; drop table if exists t_binary_061; drop table if exists t_varbinary_061; -\c postgres -drop database test_binary; \ No newline at end of file +drop schema test_binary cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_bit_xor.sql b/contrib/dolphin/sql/test_bit_xor.sql index e00d0454d..47d40271a 100644 --- a/contrib/dolphin/sql/test_bit_xor.sql +++ b/contrib/dolphin/sql/test_bit_xor.sql @@ -1,6 +1,5 @@ -drop database if exists test_bit_xor; -create database test_bit_xor dbcompatibility 'B'; -\c test_bit_xor +create schema test_bit_xor; +set current_schema to 'test_bit_xor'; -- test datetime create table test_datetime (t datetime); @@ -344,5 +343,5 @@ insert into test_varbit values(b'101'); select bit_xor(col) from test_varbit; drop table test_varbit; -\c postgres -drop database test_bit_xor; \ No newline at end of file +drop schema test_bit_xor cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_blob.sql b/contrib/dolphin/sql/test_blob.sql index 119564f60..16a21a226 100644 --- a/contrib/dolphin/sql/test_blob.sql +++ b/contrib/dolphin/sql/test_blob.sql @@ -1,6 +1,5 @@ -drop database if exists test_blob; -create database test_blob dbcompatibility 'B'; -\c test_blob +create schema test_blob; +set current_schema to 'test_blob'; create table test_template (t tinyblob, b blob, m mediumblob, l longblob); insert into test_template values('aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa', 'aaaaaaaaa'); create table test_tiny (t tinyblob); @@ -61,5 +60,5 @@ drop table test_tiny; drop table test_blob; drop table test_medium; drop table 
test_long; -\c postgres -drop database test_blob; \ No newline at end of file +drop schema test_blob cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_checksum.sql b/contrib/dolphin/sql/test_checksum.sql index 834bd63af..b051218ff 100644 --- a/contrib/dolphin/sql/test_checksum.sql +++ b/contrib/dolphin/sql/test_checksum.sql @@ -1,6 +1,5 @@ -drop database if exists db_chk_tbl; -create database db_chk_tbl dbcompatibility 'b'; -\c db_chk_tbl +create schema db_chk_tbl; +set current_schema to 'db_chk_tbl'; CREATE SCHEMA tst_schema1; SET SEARCH_PATH TO tst_schema1; @@ -190,6 +189,5 @@ INSERT INTO t_same_cmp VALUES(2022001, 'same check'); CHECKSUM TABLE t_same, t_same_cmp; -\c postgres -drop database if exists db_chk_tbl; - +drop schema db_chk_tbl cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_condition.sql b/contrib/dolphin/sql/test_condition.sql index a6ef28be5..1a7652d81 100644 --- a/contrib/dolphin/sql/test_condition.sql +++ b/contrib/dolphin/sql/test_condition.sql @@ -1,8 +1,5 @@ --- b compatibility case -drop database if exists db_test_condition; --- create database db_test_condition dbcompatibility 'b'; -create database db_test_condition with DBCOMPATIBILITY = 'B'; -\c db_test_condition +create schema db_test_condition; +set current_schema to 'db_test_condition'; set dolphin.sql_mode = ''; create table test_bccf (t1 int ,t2 float, t3 char, t4 text); insert into test_bccf values(1,3,null,null); @@ -1066,5 +1063,5 @@ select strcmp(blb, txt) from typeset; -\c postgres -drop database db_test_condition; +drop schema db_test_condition cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_current_user.sql b/contrib/dolphin/sql/test_current_user.sql index e5389e702..caf43f41b 100644 --- a/contrib/dolphin/sql/test_current_user.sql +++ b/contrib/dolphin/sql/test_current_user.sql @@ -1,6 +1,5 @@ -drop database if exists test_current_user; -create database test_current_user dbcompatibility 
'b'; -\c test_current_user +create schema test_current_user; +set current_schema to 'test_current_user'; select current_user; select current_user(); create user u1 password 'Gauss123'; @@ -70,5 +69,5 @@ DROP USER MAPPING FOR USER SERVER s1; CREATE USER MAPPING FOR u1 SERVER s1; DROP USER MAPPING FOR u1 SERVER s1; drop user u1; -\c postgres -drop database test_current_user; \ No newline at end of file +drop schema test_current_user cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_datatype.sql b/contrib/dolphin/sql/test_datatype.sql index 6726d2bbb..944bc8de0 100644 --- a/contrib/dolphin/sql/test_datatype.sql +++ b/contrib/dolphin/sql/test_datatype.sql @@ -1,6 +1,5 @@ -drop database if exists b_datatype_test; -create database b_datatype_test dbcompatibility 'B'; -\c b_datatype_test +create schema b_datatype_test; +set current_schema to 'b_datatype_test'; -- bit(n), when insert into bit, support the length less than n, which must be equal to n in normal case create table bit_test(a bit); @@ -91,5 +90,5 @@ create table all_int_test(a tinyint(9999999999), b smallint(9999999999), c mediu \d all_int_test drop table all_int_test; -\c postgres -drop database b_datatype_test; \ No newline at end of file +drop schema b_datatype_test cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_fixed.sql b/contrib/dolphin/sql/test_fixed.sql index 19d01842b..41bc4f9a9 100644 --- a/contrib/dolphin/sql/test_fixed.sql +++ b/contrib/dolphin/sql/test_fixed.sql @@ -1,9 +1,8 @@ -drop database if exists test_fixed; -create database test_fixed dbcompatibility 'B'; -\c test_fixed +create schema test_fixed; +set current_schema to 'test_fixed'; DROP TABLE IF EXISTS fixed_test; CREATE TABLE fixed_test (a fixed(10, 5)); \d fixed_test DROP TABLE fixed_test; -\c postgres -drop database test_fixed; \ No newline at end of file +drop schema test_fixed cascade; +reset current_schema; \ No newline at end of file 
diff --git a/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql b/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql index 95c74f83d..5201e56ef 100644 --- a/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql +++ b/contrib/dolphin/sql/test_float_double_real_double_precision_MD.sql @@ -1,6 +1,5 @@ -drop database if exists float_double_real_double_precision_MD; -create database float_double_real_double_precision_MD dbcompatibility 'b'; -\c float_double_real_double_precision_MD; +create schema double_precision; +set current_schema to 'double_precision'; create table test(a float(20, 2), b double(20, 2), c real(20, 2), d double precision(20, 2)); \d test; @@ -44,3 +43,6 @@ create table test3(a real(3.6, 1.6)); create table test3(a double precision(3.6, 1.6)); create table test3(a double(3.6, 1.6)); \d test3; + +drop schema double_precision cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_mysql_char.sql b/contrib/dolphin/sql/test_mysql_char.sql index 025048e8e..60abf7313 100644 --- a/contrib/dolphin/sql/test_mysql_char.sql +++ b/contrib/dolphin/sql/test_mysql_char.sql @@ -1,6 +1,5 @@ -drop database if exists test_char; -create database test_char with dbcompatibility='B'; -\c test_char +create schema test_char; +set current_schema to 'test_char'; set dolphin.b_compatibility_mode=1; set dolphin.sql_mode = ''; @@ -126,5 +125,5 @@ select '0.0100abc' || 1; select '0.0100abc' || 0; select '0.0100abc' || null; -\c postgres -drop database test_char; +drop schema test_char cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_mysql_enum.sql b/contrib/dolphin/sql/test_mysql_enum.sql index 4cece858b..169c7848c 100644 --- a/contrib/dolphin/sql/test_mysql_enum.sql +++ b/contrib/dolphin/sql/test_mysql_enum.sql @@ -1,6 +1,5 @@ -drop database if exists test_enum; -CREATE DATABASE test_enum with dbcompatibility='B'; -\c test_enum +create schema test_enum; +set current_schema 
to 'test_enum'; -- create extension dolphin; show sql_compatibility; CREATE TABLE test ( @@ -77,7 +76,7 @@ CREATE TABLE testtttttttttttttttttttttttttttttttttt ( myjobbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb enum('x','y') ); -SELECT * FROM pg_type WHERE typname like '%anonymous_enum%'; +SELECT count(*) FROM pg_type WHERE typname like 'testtttttttttttttttt_myjobbbbbbbbbbbbbbb%_anonymous_enum_1'; drop table testtttttttttttttttttttttttttttttttttt; @@ -171,5 +170,5 @@ W_COUNTRY VARCHAR(20) , W_GMT_OFFSET DECIMAL(5,2) ); -\c postgres -DROP DATABASE test_enum; +drop schema test_enum cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_mysql_operator.sql b/contrib/dolphin/sql/test_mysql_operator.sql index 747ae9f7c..f36ee0390 100644 --- a/contrib/dolphin/sql/test_mysql_operator.sql +++ b/contrib/dolphin/sql/test_mysql_operator.sql @@ -1,6 +1,5 @@ -drop database if exists test_op_and; -CREATE DATABASE test_op_and with dbcompatibility='B'; -\c test_op_and +create schema test_op_and; +set current_schema to 'test_op_and'; set dolphin.b_compatibility_mode = 1; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group'; @@ -380,13 +379,12 @@ drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; set dolphin.sql_mode = 'sql_mode_strict,sql_mode_full_group,pipes_as_concat'; -\c postgres -drop database test_op_and; +drop schema test_op_and cascade; +reset current_schema; -drop database if exists test_op_xor; -CREATE DATABASE test_op_xor with dbcompatibility='B'; -\c test_op_xor +create schema test_op_xor; +set current_schema to 'test_op_xor'; set dolphin.b_compatibility_mode = 1; select null^1; @@ -622,13 +620,12 @@ drop table testforint2_p5; ---drop database set dolphin.b_compatibility_mode = 0; -\c postgres -drop database test_op_xor; +drop schema test_op_xor cascade; +reset current_schema; -drop database if exists like_test; -create database like_test DBCOMPATIBILITY 'b'; -\c like_test +create schema like_test; +set current_schema to 
'like_test'; set dolphin.b_compatibility_mode = 1; select 'a' like 'A'; @@ -893,5 +890,5 @@ select !10; select !!10; select 10!; -\c postgres -drop database if exists like_test; +drop schema like_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_mysql_prepare.sql b/contrib/dolphin/sql/test_mysql_prepare.sql index ac2955728..4535cf56f 100644 --- a/contrib/dolphin/sql/test_mysql_prepare.sql +++ b/contrib/dolphin/sql/test_mysql_prepare.sql @@ -1,6 +1,5 @@ -drop database if exists test_mysql_prepare; -create database test_mysql_prepare dbcompatibility 'b'; -\c test_mysql_prepare +create schema test_mysql_prepare; +set current_schema to 'test_mysql_prepare'; create table test(name text, age int); insert into test values('a',18); prepare s1 as select * from test; @@ -239,5 +238,5 @@ deallocate s2; reset dolphin.b_compatibility_mode; reset enable_set_variable_b_format; -\c postgres -drop database test_mysql_prepare; \ No newline at end of file +drop schema test_mysql_prepare cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_op_blob.sql b/contrib/dolphin/sql/test_op_blob.sql index e7834900e..8c1a737e9 100644 --- a/contrib/dolphin/sql/test_op_blob.sql +++ b/contrib/dolphin/sql/test_op_blob.sql @@ -1,6 +1,5 @@ -drop database if exists test_op_blob; -create database test_op_blob dbcompatibility 'b'; -\c test_op_blob +create schema test_op_blob; +set current_schema to 'test_op_blob'; select '1'::blob ^ '1'::blob; select '1'::blob ^ '1'::char; @@ -17,5 +16,5 @@ select '1'::blob ^ 11::float; select '1'::blob ^ 11::float8; select '1'::blob ^ 11::numeric; -\c postgres -drop database test_op_blob; +drop schema test_op_blob cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_op_xor_boolandfloat.sql b/contrib/dolphin/sql/test_op_xor_boolandfloat.sql index 81fab36f5..197973bf4 100644 --- a/contrib/dolphin/sql/test_op_xor_boolandfloat.sql +++ b/contrib/dolphin/sql/test_op_xor_boolandfloat.sql @@ 
-1,6 +1,5 @@ -drop database if exists test_bool_float; -create database test_bool_float dbcompatibility 'b'; -\c test_bool_float +create schema test_bool_float; +set current_schema to 'test_bool_float'; set dolphin.b_compatibility_mode = true; select 1::bool ^ 2::int1; @@ -159,5 +158,5 @@ select (-1)::numeric ^ (-2)::float8; select (-1)::numeric ^ (-2)::numeric; set dolphin.b_compatibility_mode = false; -\c postgres -drop database test_bool_float; +drop schema test_bool_float cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_op_xor_unsignedint.sql b/contrib/dolphin/sql/test_op_xor_unsignedint.sql index d5e672932..8f4c4dd17 100644 --- a/contrib/dolphin/sql/test_op_xor_unsignedint.sql +++ b/contrib/dolphin/sql/test_op_xor_unsignedint.sql @@ -1,6 +1,5 @@ -drop database if exists test_op_xor_unsignedint; -create database test_op_xor_unsignedint with dbcompatibility='B'; -\c test_op_xor_unsignedint +create schema test_op_xor_unsignedint; +set current_schema to 'test_op_xor_unsignedint'; select (-1)::uint1 ^ 2::int1; select (-1)::uint1 ^ 2::int2; @@ -150,5 +149,5 @@ select 2 ::uint8 ^ 2::bool; select 2 ::uint8 ^ 2::char; select 2 ::uint8 ^ 2::varchar; -\c postgres -drop database test_op_xor_unsignedint; +drop schema test_op_xor_unsignedint cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_optimize.sql b/contrib/dolphin/sql/test_optimize.sql index 57f4d5c70..d420966c7 100644 --- a/contrib/dolphin/sql/test_optimize.sql +++ b/contrib/dolphin/sql/test_optimize.sql @@ -1,6 +1,5 @@ -drop database if exists db_optimize; -create database db_optimize dbcompatibility 'b'; -\c db_optimize +create schema db_optimize; +set current_schema to 'db_optimize'; create table doc(id serial primary key, content varchar(255)); insert into doc(content) select 'abcd1234' from generate_series(1,10000) as content; delete from doc where id < 9000; @@ -9,5 +8,5 @@ drop table doc; set xc_maintenance_mode = on; optimize table pg_class; set xc_maintenance_mode = 
off; -\c postgres -drop database if exists db_optimize; +drop schema db_optimize cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_proxy.sql b/contrib/dolphin/sql/test_proxy.sql index 33d0fd4e9..f33c4debb 100644 --- a/contrib/dolphin/sql/test_proxy.sql +++ b/contrib/dolphin/sql/test_proxy.sql @@ -1,6 +1,5 @@ -drop database if exists db_proxy; -create database db_proxy dbcompatibility 'b'; -\c db_proxy +create schema db_proxy; +set current_schema to 'db_proxy'; CREATE SCHEMA tst_schema1; SET SEARCH_PATH TO tst_schema1; @@ -72,6 +71,6 @@ drop role test_proxy_u1; drop role test_proxy_u2; drop role test_proxy_u3; -\c postgres -drop database if exists db_proxy; +drop schema db_proxy cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_schema.sql b/contrib/dolphin/sql/test_schema.sql index 6f2cc0d65..2cbe2ff19 100644 --- a/contrib/dolphin/sql/test_schema.sql +++ b/contrib/dolphin/sql/test_schema.sql @@ -1,8 +1,7 @@ -drop database if exists schema_test; -create database schema_test dbcompatibility 'b'; -\c schema_test +create schema schema_test; +set current_schema to 'schema_test'; SELECT SCHEMA(); -\c postgres -drop database if exists schema_test; +drop schema schema_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_set_charset.sql b/contrib/dolphin/sql/test_set_charset.sql index 81ee36079..98c3716c3 100644 --- a/contrib/dolphin/sql/test_set_charset.sql +++ b/contrib/dolphin/sql/test_set_charset.sql @@ -1,6 +1,5 @@ -drop database if exists db_charset; -create database db_charset dbcompatibility 'b'; -\c db_charset +create schema db_charset; +set current_schema to 'db_charset'; show client_encoding; set charset gbk; show client_encoding; @@ -19,5 +18,5 @@ set names 'gbk'; show client_encoding; set names default; show client_encoding; -\c postgres -drop database if exists db_charset; +drop schema db_charset cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows.sql 
b/contrib/dolphin/sql/test_shows.sql index 162585473..e78d1b29e 100644 --- a/contrib/dolphin/sql/test_shows.sql +++ b/contrib/dolphin/sql/test_shows.sql @@ -1,8 +1,7 @@ -drop database if exists db_show; -create database db_show dbcompatibility 'b'; -\c db_show +create schema db_show; +set current_schema to 'db_show'; show processlist; show full processlist; -\c postgres -drop database if exists db_show; +drop schema db_show cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows_3.sql b/contrib/dolphin/sql/test_shows_3.sql index aa3c204d2..11a05121c 100644 --- a/contrib/dolphin/sql/test_shows_3.sql +++ b/contrib/dolphin/sql/test_shows_3.sql @@ -1,6 +1,5 @@ -drop database if exists db_show_3; -create database db_show_3 dbcompatibility 'b'; -\c db_show_3 +create schema db_show_3; +set current_schema to 'db_show_3'; show databases; create schema aa1; create schema aa2; @@ -18,7 +17,8 @@ set role u1 password 'abc@1234'; show databases; set role u2 password 'abc@1234'; show databases; -\c postgres -drop database if exists db_show_3; +\c contrib_regression drop user u1; drop user u2; +drop schema db_show_3 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows_4.sql b/contrib/dolphin/sql/test_shows_4.sql index 8589d1294..ab8fe63c4 100644 --- a/contrib/dolphin/sql/test_shows_4.sql +++ b/contrib/dolphin/sql/test_shows_4.sql @@ -1,7 +1,6 @@ -drop database if exists db_show_4; -create database db_show_4 dbcompatibility 'b'; -\c db_show_4 +create schema db_show_4; +set current_schema to 'db_show_4'; show master status; show slave hosts; -\c postgres -drop database if exists db_show_4; +drop schema db_show_4 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_shows_5.sql b/contrib/dolphin/sql/test_shows_5.sql index f729d3b59..431e37ff0 100644 --- a/contrib/dolphin/sql/test_shows_5.sql +++ b/contrib/dolphin/sql/test_shows_5.sql @@ -1,11 +1,10 @@ -DROP DATABASE IF EXISTS db_show_5; -CREATE DATABASE db_show_5 
DBCOMPATIBILITY 'b'; -\c db_show_5 +create schema db_show_5; +set current_schema to 'db_show_5'; CREATE SCHEMA tst_schema5; --orientation=row, normal primary key -CREATE TABLE public.t1 +CREATE TABLE db_show_5.t1 ( id int primary key, name varchar(20), @@ -13,7 +12,7 @@ phone text ) WITH(ORIENTATION=ROW, STORAGE_TYPE=USTORE); --orientation=column, serial primary key -CREATE TABLE public.t2 +CREATE TABLE db_show_5.t2 ( id serial primary key, name varchar(20), @@ -70,5 +69,5 @@ REVOKE SELECT ON ALL TABLES IN SCHEMA tst_schema5 FROM tst_shows_u5; REVOKE SELECT ON ALL SEQUENCES IN SCHEMA tst_schema5 FROM tst_shows_u5; DROP USER tst_shows_u5; -\c postgres -DROP DATABASE IF EXISTS db_show_5; +drop schema db_show_5 cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/test_system_user.sql b/contrib/dolphin/sql/test_system_user.sql index 7fb4f83f5..a5bb529bd 100644 --- a/contrib/dolphin/sql/test_system_user.sql +++ b/contrib/dolphin/sql/test_system_user.sql @@ -1,10 +1,9 @@ -drop database if exists test_system_user; -create database test_system_user dbcompatibility 'b'; -\c test_system_user +create schema test_system_user; +set current_schema to 'test_system_user'; select session_user; select session_user(); select user; select user(); select system_user(); -\c postgres -drop database test_system_user; \ No newline at end of file +drop schema test_system_user cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/test_table_index.sql b/contrib/dolphin/sql/test_table_index.sql index 3d56fa88e..6697805d2 100644 --- a/contrib/dolphin/sql/test_table_index.sql +++ b/contrib/dolphin/sql/test_table_index.sql @@ -1,7 +1,5 @@ ----- b compatibility case -drop database if exists test_table_index; -create database test_table_index dbcompatibility 'b'; -\c test_table_index +create schema test_table_index; +set current_schema to 'test_table_index'; -- test crate normal table create table t1(f1 int , index(f1)); @@ -374,5 +372,5 @@ alter 
table test_option1 add key ixd_at12 using btree (b) using aaa; alter table test_option1 add key ixd_at13 using btree (b) using aaa using btree; alter table test_option1 add key ixd_at14 using btree (b) comment 'xx' using aaa using btree; -\c contrib_regression -DROP DATABASE test_table_index; \ No newline at end of file +drop schema test_table_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_agg.sql b/contrib/dolphin/sql/tinyint_agg.sql index bdc02437b..cb30f7dd5 100644 --- a/contrib/dolphin/sql/tinyint_agg.sql +++ b/contrib/dolphin/sql/tinyint_agg.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_agg; -create database tinyint_agg dbcompatibility 'b'; -\c tinyint_agg +create schema tinyint_agg; +set current_schema to 'tinyint_agg'; create table u1(a int1, b int2); @@ -48,5 +47,5 @@ explain(costs off, verbose) select variance(a)from smp_test; explain(costs off, verbose) select listagg(a) within group(order by a) from smp_test; explain(costs off, verbose) select listagg(a, ',') within group(order by a) from smp_test; -\c postgres -drop database tinyint_agg; \ No newline at end of file +drop schema tinyint_agg cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_cast.sql b/contrib/dolphin/sql/tinyint_cast.sql index 336a38bb6..7dd03b846 100644 --- a/contrib/dolphin/sql/tinyint_cast.sql +++ b/contrib/dolphin/sql/tinyint_cast.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_cast; -create database tinyint_cast dbcompatibility 'b'; -\c tinyint_cast +create schema tinyint_cast; +set current_schema to 'tinyint_cast'; create table t1(a int1); @@ -99,5 +98,5 @@ select (-1)::text::int1; select '127'::text::int1; select '-128'::text::int1; -\c postgres -drop database tinyint_cast; \ No newline at end of file +drop schema tinyint_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_index.sql 
b/contrib/dolphin/sql/tinyint_index.sql index dabc8af4a..cf317a8fc 100644 --- a/contrib/dolphin/sql/tinyint_index.sql +++ b/contrib/dolphin/sql/tinyint_index.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_index; -create database tinyint_index dbcompatibility 'b'; -\c tinyint_index +create schema tinyint_index; +set current_schema to 'tinyint_index'; create table t1(a int1); insert into t1 select generate_series(-128, 127); @@ -76,4 +75,7 @@ analyze t1; explain(costs off, verbose)select * from t1 where a >= -1::int1 and a <= 0::int1; explain(costs off, verbose)select * from t1 where a >= -1::int2 and a <= 0::int2; explain(costs off, verbose)select * from t1 where a >= -1::int4 and a <= 0::int4; -explain(costs off, verbose)select * from t1 where a >= -1::int8 and a <= 0::int8; \ No newline at end of file +explain(costs off, verbose)select * from t1 where a >= -1::int8 and a <= 0::int8; + +drop schema tinyint_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_operator.sql b/contrib/dolphin/sql/tinyint_operator.sql index 2aa4b9656..e61c996cf 100644 --- a/contrib/dolphin/sql/tinyint_operator.sql +++ b/contrib/dolphin/sql/tinyint_operator.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_operator; -create database tinyint_operator dbcompatibility 'b'; -\c tinyint_operator +create schema tinyint_operator; +set current_schema to 'tinyint_operator'; select 1::int1 + 1::int1; select (-1)::int1 + (-1)::int1; @@ -62,5 +61,5 @@ select @(-1)::int1; select @127::int1; select @(-128)::int1; -\c postgres -drop database tinyint_operator; \ No newline at end of file +drop schema tinyint_operator cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_partition.sql b/contrib/dolphin/sql/tinyint_partition.sql index cd7f1ab2c..2b6e6a659 100644 --- a/contrib/dolphin/sql/tinyint_partition.sql +++ b/contrib/dolphin/sql/tinyint_partition.sql @@ -1,6 +1,5 @@ -drop database if exists 
tinyint_partition; -create database tinyint_partition dbcompatibility 'b'; -\c tinyint_partition +create schema tinyint_partition; +set current_schema to 'tinyint_partition'; CREATE TABLE t1 ( @@ -205,5 +204,5 @@ insert into start_end1 values(1); insert into start_end1 values(127); select * from start_end1; -\c postgres -drop database tinyint_partition; \ No newline at end of file +drop schema tinyint_partition cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/tinyint_smp_join_procedure.sql b/contrib/dolphin/sql/tinyint_smp_join_procedure.sql index 53b00cbd9..a8506efe9 100644 --- a/contrib/dolphin/sql/tinyint_smp_join_procedure.sql +++ b/contrib/dolphin/sql/tinyint_smp_join_procedure.sql @@ -1,6 +1,5 @@ -drop database if exists tinyint_smp; -create database tinyint_smp dbcompatibility 'b'; -\c tinyint_smp +create schema tinyint_smp; +set current_schema to 'tinyint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; @@ -52,5 +51,5 @@ select test_p1(1, 3); select test_p1(-1, -3); select * from test1; -\c postgres -drop database tinyint_smp; \ No newline at end of file +drop schema tinyint_smp cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_agg.sql b/contrib/dolphin/sql/uint_agg.sql index 33f4b6077..b65f89e36 100644 --- a/contrib/dolphin/sql/uint_agg.sql +++ b/contrib/dolphin/sql/uint_agg.sql @@ -1,6 +1,5 @@ -drop database if exists uint_agg; -create database uint_agg dbcompatibility 'b'; -\c uint_agg +create schema uint_agg; +set current_schema to 'uint_agg'; --uint1 create table u1(a uint1, b int2); @@ -104,5 +103,5 @@ explain(costs off, verbose) select variance(a), variance(b) from smp_test; explain(costs off, verbose) select listagg(a) within group(order by a) from smp_test; explain(costs off, verbose) select listagg(a, ',') within group(order by a) from smp_test; -\c postgres -drop database uint_agg; \ No newline at end of file +drop schema uint_agg cascade; 
+reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_and.sql b/contrib/dolphin/sql/uint_and.sql index f787f78c6..adc5557d9 100644 --- a/contrib/dolphin/sql/uint_and.sql +++ b/contrib/dolphin/sql/uint_and.sql @@ -1,6 +1,5 @@ -drop database if exists uint_and; -create database uint_and dbcompatibility 'b'; -\c uint_and +create schema uint_and; +set current_schema to 'uint_and'; --uint8 select 18446744073709551615::uint8 & 0::int1; @@ -174,5 +173,5 @@ select 127::int1 & 1::uint2; select 127::int1 & 1::uint4; select 127::int1 & 1::uint8; -\c postgres -drop database uint_and \ No newline at end of file +drop schema uint_and cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_auto_increment.sql b/contrib/dolphin/sql/uint_auto_increment.sql index c5e4cce1f..2f58788ef 100644 --- a/contrib/dolphin/sql/uint_auto_increment.sql +++ b/contrib/dolphin/sql/uint_auto_increment.sql @@ -1,7 +1,5 @@ - --- create b db -create database uint_auto_increment with dbcompatibility = 'B'; -\c uint_auto_increment +create schema uint_auto_increment; +set current_schema to 'uint_auto_increment'; -- test CREATE TABLE with AUTO_INCREMENT -- syntax error CREATE TABLE test_create_autoinc_err(id int unsigned auto_increment key, name varchar(200),a int unsigned); @@ -802,5 +800,5 @@ SELECT col1,col2 FROM test_autoinc_insert_select ORDER BY 1; drop table test_autoinc_source; drop table test_autoinc_insert_select; -\c postgres -drop database if exists uint_auto_increment; \ No newline at end of file +drop schema uint_auto_increment cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_cast.sql b/contrib/dolphin/sql/uint_cast.sql index a98639851..06371b512 100644 --- a/contrib/dolphin/sql/uint_cast.sql +++ b/contrib/dolphin/sql/uint_cast.sql @@ -1,6 +1,5 @@ -drop database if exists uint_cast; -create database uint_cast dbcompatibility 'b'; -\c uint_cast +create schema uint_cast; 
+set current_schema to 'uint_cast'; select (-1)::bool::uint8; select (0)::bool::uint8; @@ -284,5 +283,5 @@ select (1)::uint4::bool; select (0)::uint8::bool; select (1)::uint8::bool; -\c postgres -drop database uint_cast; \ No newline at end of file +drop schema uint_cast cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_cast2.sql b/contrib/dolphin/sql/uint_cast2.sql index a04fefc95..71ea16cf8 100644 --- a/contrib/dolphin/sql/uint_cast2.sql +++ b/contrib/dolphin/sql/uint_cast2.sql @@ -1,6 +1,5 @@ -drop database if exists uint_cast2; -create database uint_cast2 dbcompatibility 'b'; -\c uint_cast2 +create schema uint_cast2; +set current_schema to 'uint_cast2'; drop table if exists t1 ; create table t1(a uint8); @@ -313,5 +312,5 @@ insert into t1 values((-1)::uint1); insert into t1 values(255::uint1); insert into t1 values(256::uint1); -\c postgres -drop database uint_cast2; \ No newline at end of file +drop schema uint_cast2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_cast3.sql b/contrib/dolphin/sql/uint_cast3.sql index 787e08e63..491b34d2d 100644 --- a/contrib/dolphin/sql/uint_cast3.sql +++ b/contrib/dolphin/sql/uint_cast3.sql @@ -1,6 +1,5 @@ -drop database if exists uint_cast3; -create database uint_cast3 dbcompatibility 'b'; -\c uint_cast3 +create schema uint_cast3; +set current_schema to 'uint_cast3'; select 1::uint1::int16; select 1::int16::uint1; select 1::uint2::int16; @@ -95,5 +94,5 @@ select '65536'::text::uint2; select '4294967296'::text::uint4; select '18446744073709551616'::text::uint8; -\c postgres -drop database uint_cast3; \ No newline at end of file +drop schema uint_cast3 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_div.sql b/contrib/dolphin/sql/uint_div.sql index 6a07a9ae7..7a0fa9498 100644 --- a/contrib/dolphin/sql/uint_div.sql +++ b/contrib/dolphin/sql/uint_div.sql @@ -1,6 +1,5 @@ -drop database 
if exists uint_div; -create database uint_div dbcompatibility 'b'; -\c uint_div +create schema uint_div; +set current_schema to 'uint_div'; --uint8 select 18446744073709551615::uint8 / 0::int1; @@ -138,5 +137,5 @@ select 127::int1 / 1::uint2; select 127::int1 / 1::uint4; select 127::int1 / 1::uint8; -\c postgres -drop database uint_div \ No newline at end of file +drop schema uint_div cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_ignore.sql b/contrib/dolphin/sql/uint_ignore.sql index aaa8a487a..713120ffa 100644 --- a/contrib/dolphin/sql/uint_ignore.sql +++ b/contrib/dolphin/sql/uint_ignore.sql @@ -1,6 +1,5 @@ -drop database if exists uint_ignore; -create database uint_ignore dbcompatibility 'b'; -\c uint_ignore +create schema uint_ignore; +set current_schema to 'uint_ignore'; drop table if exists t1 ; create table t1(a uint8); @@ -295,5 +294,5 @@ insert ignore into t1 values(256::uint1); select * from t1; -\c postgres -drop database uint_ignore; \ No newline at end of file +drop schema uint_ignore cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_in.sql b/contrib/dolphin/sql/uint_in.sql index 8662ec342..9d9aca127 100644 --- a/contrib/dolphin/sql/uint_in.sql +++ b/contrib/dolphin/sql/uint_in.sql @@ -1,6 +1,5 @@ -drop database if exists uint_in; -create database uint_in dbcompatibility 'b'; -\c uint_in +create schema uint_in; +set current_schema to 'uint_in'; create table t1(a uint1); create table t2(a uint2); @@ -31,5 +30,5 @@ insert into t4 values('1.5'); select * from t4; -\c postgres -drop database uint_in \ No newline at end of file +drop schema uint_in cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_index.sql b/contrib/dolphin/sql/uint_index.sql index 98fd6cf49..6e0ad70e7 100644 --- a/contrib/dolphin/sql/uint_index.sql +++ b/contrib/dolphin/sql/uint_index.sql @@ -1,6 +1,5 @@ -drop database if exists uint_index; 
-create database uint_index dbcompatibility 'b'; -\c uint_index +create schema uint_index; +set current_schema to 'uint_index'; create table t1(a uint1); insert into t1 select generate_series(1, 255); @@ -220,5 +219,5 @@ explain(costs off, verbose)select * from t4 where a = 1::int2; explain(costs off, verbose)select * from t4 where a = 1::int4; explain(costs off, verbose)select * from t4 where a = 1::int8; -\c postgres -drop database uint_index; \ No newline at end of file +drop schema uint_index cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_join.sql b/contrib/dolphin/sql/uint_join.sql index a74b23453..32927eb90 100644 --- a/contrib/dolphin/sql/uint_join.sql +++ b/contrib/dolphin/sql/uint_join.sql @@ -1,6 +1,5 @@ -drop database if exists uint_join; -create database uint_join dbcompatibility 'b'; -\c uint_join +create schema uint_join; +set current_schema to 'uint_join'; create table t1(a int2, b uint2); create table t2(a uint4, b uint4); @@ -22,5 +21,5 @@ select /*+ nestloop(t1 t2)*/ * from t1 join t2; select /*+ hashjoin(t1 t2)*/ * from t1 join t2; select /*+ mergejoin(t1 t2)*/ * from t1 join t2; -\c postgres -drop database uint_join; \ No newline at end of file +drop schema uint_join cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mi.sql b/contrib/dolphin/sql/uint_mi.sql index cd53a9354..7f665fc4b 100644 --- a/contrib/dolphin/sql/uint_mi.sql +++ b/contrib/dolphin/sql/uint_mi.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mi; -create database uint_mi dbcompatibility 'b'; -\c uint_mi +create schema uint_mi; +set current_schema to 'uint_mi'; --uint8 select 18446744073709551615::uint8 - 0::int1; @@ -221,5 +220,5 @@ select 0::int1 - 1::uint2; select 0::int1 - 1::uint4; select 0::int1 - 1::uint8; -\c postgres -drop database uint_mi \ No newline at end of file +drop schema uint_mi cascade; +reset current_schema; \ No newline at end of file diff --git 
a/contrib/dolphin/sql/uint_mod.sql b/contrib/dolphin/sql/uint_mod.sql index 0f325ad81..8aa0273bd 100644 --- a/contrib/dolphin/sql/uint_mod.sql +++ b/contrib/dolphin/sql/uint_mod.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mod; -create database uint_mod dbcompatibility 'b'; -\c uint_mod +create schema uint_mod; +set current_schema to 'uint_mod'; --uint8 select 18446744073709551615::uint8 % 0::int1; @@ -210,5 +209,5 @@ select 127::int1 % 1::uint2; select 127::int1 % 1::uint4; select 127::int1 % 1::uint8; -\c postgres -drop database uint_mod \ No newline at end of file +drop schema uint_mod cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mod2.sql b/contrib/dolphin/sql/uint_mod2.sql index 2fc7e4aa1..02150d703 100644 --- a/contrib/dolphin/sql/uint_mod2.sql +++ b/contrib/dolphin/sql/uint_mod2.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mod2; -create database uint_mod2 dbcompatibility 'b'; -\c uint_mod2 +create schema uint_mod2; +set current_schema to 'uint_mod2'; --uint8 select 18446744073709551615::uint8 mod 0::int1; @@ -210,5 +209,5 @@ select 127::int1 mod 1::uint2; select 127::int1 mod 1::uint4; select 127::int1 mod 1::uint8; -\c postgres -drop database uint_mod2 \ No newline at end of file +drop schema uint_mod2 cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_mul.sql b/contrib/dolphin/sql/uint_mul.sql index 8c10cc85d..a4e249015 100644 --- a/contrib/dolphin/sql/uint_mul.sql +++ b/contrib/dolphin/sql/uint_mul.sql @@ -1,6 +1,5 @@ -drop database if exists uint_mul; -create database uint_mul dbcompatibility 'b'; -\c uint_mul +create schema uint_mul; +set current_schema to 'uint_mul'; --uint8 select 18446744073709551615::uint8 * 0::int1; @@ -138,5 +137,5 @@ select 127::int1 * 1::uint2; select 127::int1 * 1::uint4; select 127::int1 * 1::uint8; -\c postgres -drop database uint_mul \ No newline at end of file +drop schema uint_mul cascade; +reset current_schema; \ 
No newline at end of file diff --git a/contrib/dolphin/sql/uint_numeric.sql b/contrib/dolphin/sql/uint_numeric.sql index 4b021d484..3017d47be 100644 --- a/contrib/dolphin/sql/uint_numeric.sql +++ b/contrib/dolphin/sql/uint_numeric.sql @@ -1,6 +1,5 @@ -drop database if exists uint_numeric; -create database uint_numeric dbcompatibility 'b'; -\c uint_numeric +create schema uint_numeric; +set current_schema to 'uint_numeric'; select (-1)::numeric::uint1; select (-1)::numeric::uint2; @@ -103,5 +102,5 @@ insert into t4 select b from num; insert into t4 select c from num; -\c postgres -drop database uint_numeric; \ No newline at end of file +drop schema uint_numeric cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_operator.sql b/contrib/dolphin/sql/uint_operator.sql index f3ca9e662..a1b27024a 100644 --- a/contrib/dolphin/sql/uint_operator.sql +++ b/contrib/dolphin/sql/uint_operator.sql @@ -1,6 +1,5 @@ -drop database if exists uint_operator; -create database uint_operator dbcompatibility 'b'; -\c uint_operator +create schema uint_operator; +set current_schema to 'uint_operator'; -- > select 1::uint1 > 1::uint1; @@ -367,6 +366,6 @@ select ~0::uint2; select ~0::uint4; select ~0::uint8; -\c postgres -drop database uint_operator; +drop schema uint_operator cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/uint_or.sql b/contrib/dolphin/sql/uint_or.sql index 0d1b1cca1..610a2793c 100644 --- a/contrib/dolphin/sql/uint_or.sql +++ b/contrib/dolphin/sql/uint_or.sql @@ -1,6 +1,5 @@ -drop database if exists uint_or; -create database uint_or dbcompatibility 'b'; -\c uint_or +create schema uint_or; +set current_schema to 'uint_or'; --uint8 select 18446744073709551615::uint8 | 0::int1; @@ -174,5 +173,5 @@ select 127::int1 | 1::uint2; select 127::int1 | 1::uint4; select 127::int1 | 1::uint8; -\c postgres -drop database uint_or \ No newline at end of file +drop schema uint_or cascade; +reset current_schema; \ No newline at end 
of file diff --git a/contrib/dolphin/sql/uint_partition.sql b/contrib/dolphin/sql/uint_partition.sql index 44d02e60a..e4f3ef22e 100644 --- a/contrib/dolphin/sql/uint_partition.sql +++ b/contrib/dolphin/sql/uint_partition.sql @@ -1,6 +1,5 @@ -drop database if exists uint_partition; -create database uint_partition dbcompatibility 'b'; -\c uint_partition +create schema uint_partition; +set current_schema to 'uint_partition'; CREATE TABLE t1 ( @@ -217,5 +216,5 @@ create table t_unsigned_0030_8(col01 bigint unsigned) partition by range(col01)(partition p start(1) end(255) every(50)); insert into t_unsigned_0030_8 values(1); -\c postgres -drop database uint_partition; \ No newline at end of file +drop schema uint_partition cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_pl.sql b/contrib/dolphin/sql/uint_pl.sql index c79d8eb19..2e3915244 100644 --- a/contrib/dolphin/sql/uint_pl.sql +++ b/contrib/dolphin/sql/uint_pl.sql @@ -1,6 +1,5 @@ -drop database if exists uint_add; -create database uint_add dbcompatibility 'b'; -\c uint_add +create schema uint_add; +set current_schema to 'uint_add'; --uint8 select 18446744073709551615::uint8 + 0::int1; @@ -232,5 +231,5 @@ select 127::int1 + null::uint8; select 127::int1 + 65535::uint2; select 127::int1 + 4294967295::uint4; select 127::int1 + 18446744073709551615::uint8; -\c postgres -drop database uint_add \ No newline at end of file +drop schema uint_add cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_procedure_col_bypass.sql b/contrib/dolphin/sql/uint_procedure_col_bypass.sql index 722408a1c..27fc486b5 100644 --- a/contrib/dolphin/sql/uint_procedure_col_bypass.sql +++ b/contrib/dolphin/sql/uint_procedure_col_bypass.sql @@ -1,6 +1,5 @@ -drop database if exists uint_procedure_col; -create database uint_procedure_col dbcompatibility 'b'; -\c uint_procedure_col +create schema uint_procedure_col; +set current_schema to 
'uint_procedure_col'; create procedure test_p1(uint2, uint4) SHIPPABLE VOLATILE @@ -32,5 +31,5 @@ explain(costs off, verbose) select b from bypass where a = 1; explain(costs off, verbose) delete from bypass where b = 10; explain(costs off, verbose) update bypass set b = b + 1 where a = 1; -\c postgres -drop database uint_procedure_col; \ No newline at end of file +drop schema uint_procedure_col cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_smp.sql b/contrib/dolphin/sql/uint_smp.sql index f66a53b67..833598da3 100644 --- a/contrib/dolphin/sql/uint_smp.sql +++ b/contrib/dolphin/sql/uint_smp.sql @@ -1,6 +1,5 @@ -drop database if exists uint_smp; -create database uint_smp dbcompatibility 'b'; -\c uint_smp +create schema uint_smp; +set current_schema to 'uint_smp'; set enable_opfusion = on; set opfusion_debug_mode = log; @@ -85,5 +84,5 @@ explain(costs off, verbose) select /*+ nestloop(join_1 join_2)*/ * from join_1 l explain(costs off, verbose) select /*+ hashjoin(join_1 join_2)*/ * from join_1 left join join_2 on join_1.a = join_2.a; explain(costs off, verbose) select /*+ mergejoin(join_1 join_2)*/ * from join_1 left join join_2 on join_1.a = join_2.a; -\c postgres -drop database uint_smp; \ No newline at end of file +drop schema uint_smp cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_sql_mode.sql b/contrib/dolphin/sql/uint_sql_mode.sql index b1f3454b9..6bdfbbf09 100644 --- a/contrib/dolphin/sql/uint_sql_mode.sql +++ b/contrib/dolphin/sql/uint_sql_mode.sql @@ -1,6 +1,5 @@ -drop database if exists uint_sql_mode; -create database uint_sql_mode dbcompatibility 'b'; -\c uint_sql_mode +create schema uint_sql_mode; +set current_schema to 'uint_sql_mode'; set dolphin.sql_mode = ''; @@ -563,5 +562,5 @@ insert into t1 values(256::uint1); select * from t1; -\c postgres -drop database uint_sql_mode; \ No newline at end of file +drop schema uint_sql_mode cascade; +reset 
current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/uint_xor.sql b/contrib/dolphin/sql/uint_xor.sql index 920e42894..009738af2 100644 --- a/contrib/dolphin/sql/uint_xor.sql +++ b/contrib/dolphin/sql/uint_xor.sql @@ -1,6 +1,5 @@ -drop database if exists uint_xor; -create database uint_xor dbcompatibility 'b'; -\c uint_xor +create schema uint_xor; +set current_schema to 'uint_xor'; --uint8 select 18446744073709551615::uint8 # 0::int1; @@ -174,5 +173,5 @@ select 127::int1 # 1::uint2; select 127::int1 # 1::uint4; select 127::int1 # 1::uint8; -\c postgres -drop database uint_xor \ No newline at end of file +drop schema uint_xor cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/upsert.sql b/contrib/dolphin/sql/upsert.sql index 65b719775..02489ed6c 100644 --- a/contrib/dolphin/sql/upsert.sql +++ b/contrib/dolphin/sql/upsert.sql @@ -1,6 +1,5 @@ -drop database if exists upsert; -create database upsert dbcompatibility 'b'; -\c upsert +create schema upsert; +set current_schema to 'upsert'; --normal test @@ -469,5 +468,5 @@ INSERT INTO subpartition_03 VALUES (1, 1, '1', 1) ON DUPLICATE KEY UPDATE col_2 select * from subpartition_03; INSERT INTO subpartition_03 VALUES (1, 2, '1', 1) ON DUPLICATE KEY UPDATE col_1 = 2; select * from subpartition_03; -\c postgres -drop database upsert \ No newline at end of file +drop schema upsert cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/use_dbname.sql b/contrib/dolphin/sql/use_dbname.sql index c8cc7bf30..dd21c7a3c 100644 --- a/contrib/dolphin/sql/use_dbname.sql +++ b/contrib/dolphin/sql/use_dbname.sql @@ -1,6 +1,5 @@ -drop database if exists use_dbname; -create database use_dbname dbcompatibility 'b'; -\c use_dbname +create schema use_dbname; +set current_schema to 'use_dbname'; CREATE schema db1; CREATE schema db2; USE db1; @@ -14,5 +13,5 @@ select a from db2.test; select a from test; USE db1; select a from test; -\c postgres 
-drop database if exists use_dbname; \ No newline at end of file +drop schema use_dbname cascade; +reset current_schema; \ No newline at end of file diff --git a/contrib/dolphin/sql/vec_engine.sql b/contrib/dolphin/sql/vec_engine.sql index 7a01b7957..3b1acc8e6 100644 --- a/contrib/dolphin/sql/vec_engine.sql +++ b/contrib/dolphin/sql/vec_engine.sql @@ -1,5 +1,5 @@ -create database vec_engine_test dbcompatibility 'b'; -\c vec_engine_test +create schema vec_engine_test; +set current_schema to 'vec_engine_test'; CREATE TABLE customer ( c_custkey integer NOT NULL, c_name character varying(25) NOT NULL, @@ -102,5 +102,5 @@ explain (costs off) select n_name order by revenue desc; -\c postgres -drop database vec_engine_test; +drop schema vec_engine_test cascade; +reset current_schema; diff --git a/contrib/dolphin/sql/zerofill.sql b/contrib/dolphin/sql/zerofill.sql index e3d2b289a..dc48323b0 100644 --- a/contrib/dolphin/sql/zerofill.sql +++ b/contrib/dolphin/sql/zerofill.sql @@ -1,6 +1,5 @@ -drop database if exists db_zerofill; -create database db_zerofill dbcompatibility 'b'; -\c db_zerofill +create schema db_zerofill; +set current_schema to 'db_zerofill'; create table t1_zerofill ( a int(5) zerofill, @@ -20,5 +19,5 @@ create table t1_zerofill ( create table t2_zerofill (a float zerofill); create table t2_zerofill (a double precision zerofill); -\c postgres -drop database if exists db_zerofill; \ No newline at end of file +drop schema db_zerofill cascade; +reset current_schema; \ No newline at end of file -- Gitee