From cc73d9dfbe507f11696864e0815738072b3b493b Mon Sep 17 00:00:00 2001 From: Ajin Cherian Date: Thu, 23 Feb 2023 05:55:53 -0500 Subject: [PATCH v74 4/8] Introduce the test_ddl_deparse_regress test module. This testing module aims to achieve the following four testing goals for the DDL deparser: 1. Test that the generated JSON blob is expected using SQL tests. 2. Test that the re-formed DDL command is expected using SQL tests. 3. Test that the re-formed DDL command has the same effect as the original command by comparing the results of pg_dump, using the SQL tests in 1 and 2. 4. Test that new DDL syntax is handled by the DDL deparser by capturing and deparsing DDL commands run by pg_regress. 1 and 2 are tested with SQL tests, by printing the deparsed JSON blob and the re-formed command. Goal 3 is tested with TAP framework in t/001_compare_dumped_results.pl Goal 4 is tested with TAP framework and pg_regress in 002_regress_tests.pl, the execution is currently commented out because it will fail due to unimplemented commands in the DDL deparser. 
--- src/test/modules/Makefile | 1 + .../test_ddl_deparse_regress/.gitignore | 4 + .../modules/test_ddl_deparse_regress/Makefile | 44 ++++ .../test_ddl_deparse_regress/README.md | 22 ++ .../expected/aggregate.out | 8 + .../expected/create_extension.out | 6 + .../expected/create_schema.out | 22 ++ .../expected/create_table.out | 36 +++ .../expected/test_ddl_deparse.out | 18 ++ .../test_ddl_deparse_regress/meson.build | 42 ++++ .../sql/aggregate.sql | 7 + .../sql/create_extension.sql | 5 + .../sql/create_schema.sql | 16 ++ .../sql/create_table.sql | 16 ++ .../sql/test_ddl_deparse.sql | 20 ++ .../t/001_compare_dumped_results.pl | 181 +++++++++++++++ .../t/002_regress_tests.pl | 207 ++++++++++++++++++ .../test_ddl_deparse_regress--1.0.sql | 9 + .../test_ddl_deparse_regress.c | 59 +++++ .../test_ddl_deparse_regress.control | 4 + 20 files changed, 727 insertions(+) create mode 100644 src/test/modules/test_ddl_deparse_regress/.gitignore create mode 100644 src/test/modules/test_ddl_deparse_regress/Makefile create mode 100644 src/test/modules/test_ddl_deparse_regress/README.md create mode 100644 src/test/modules/test_ddl_deparse_regress/expected/aggregate.out create mode 100644 src/test/modules/test_ddl_deparse_regress/expected/create_extension.out create mode 100644 src/test/modules/test_ddl_deparse_regress/expected/create_schema.out create mode 100644 src/test/modules/test_ddl_deparse_regress/expected/create_table.out create mode 100644 src/test/modules/test_ddl_deparse_regress/expected/test_ddl_deparse.out create mode 100644 src/test/modules/test_ddl_deparse_regress/meson.build create mode 100644 src/test/modules/test_ddl_deparse_regress/sql/aggregate.sql create mode 100644 src/test/modules/test_ddl_deparse_regress/sql/create_extension.sql create mode 100644 src/test/modules/test_ddl_deparse_regress/sql/create_schema.sql create mode 100644 src/test/modules/test_ddl_deparse_regress/sql/create_table.sql create mode 100644 
src/test/modules/test_ddl_deparse_regress/sql/test_ddl_deparse.sql create mode 100644 src/test/modules/test_ddl_deparse_regress/t/001_compare_dumped_results.pl create mode 100644 src/test/modules/test_ddl_deparse_regress/t/002_regress_tests.pl create mode 100644 src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress--1.0.sql create mode 100644 src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.c create mode 100644 src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.control diff --git a/src/test/modules/Makefile b/src/test/modules/Makefile index c629cbe383..c272c64a20 100644 --- a/src/test/modules/Makefile +++ b/src/test/modules/Makefile @@ -18,6 +18,7 @@ SUBDIRS = \ test_copy_callbacks \ test_custom_rmgrs \ test_ddl_deparse \ + test_ddl_deparse_regress \ test_extensions \ test_ginpostinglist \ test_integerset \ diff --git a/src/test/modules/test_ddl_deparse_regress/.gitignore b/src/test/modules/test_ddl_deparse_regress/.gitignore new file mode 100644 index 0000000000..5dcb3ff972 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/.gitignore @@ -0,0 +1,4 @@ +# Generated subdirectories +/log/ +/results/ +/tmp_check/ diff --git a/src/test/modules/test_ddl_deparse_regress/Makefile b/src/test/modules/test_ddl_deparse_regress/Makefile new file mode 100644 index 0000000000..5d5a9e8652 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/Makefile @@ -0,0 +1,44 @@ +#------------------------------------------------------------------------- +# +# Makefile for src/test/modules/test_ddl_deparse_regress +# +# Portions Copyright (c) 1996-2022, PostgreSQL Global Development Group +# Portions Copyright (c) 1994, Regents of the University of California +# +# src/test/modules/test_ddl_deparse_regress/Makefile +# +#------------------------------------------------------------------------- + + +MODULES = test_ddl_deparse_regress +PGFILEDESC = "test_ddl_deparse_regress - regression testing for DDL deparsing" + +EXTENSION = 
test_ddl_deparse_regress +DATA = test_ddl_deparse_regress--1.0.sql + +# test_ddl_deparse must be first +REGRESS = test_ddl_deparse \ + create_extension \ + create_schema \ + create_table \ + aggregate + +export REGRESS + +EXTRA_INSTALL = contrib/pg_stat_statements + +TAP_TESTS = 1 + +ifdef USE_PGXS +PG_CONFIG = pg_config +PGXS := $(shell $(PG_CONFIG) --pgxs) +include $(PGXS) +else +subdir = src/test/modules/test_ddl_deparse_regress +top_builddir = ../../../.. +include $(top_builddir)/src/Makefile.global +include $(top_srcdir)/contrib/contrib-global.mk +endif + +REGRESS_SHLIB=$(abs_top_builddir)/src/test/regress/regress$(DLSUFFIX) +export REGRESS_SHLIB diff --git a/src/test/modules/test_ddl_deparse_regress/README.md b/src/test/modules/test_ddl_deparse_regress/README.md new file mode 100644 index 0000000000..cc2519895a --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/README.md @@ -0,0 +1,22 @@ +# Testing harness for DDL deparser + +## Testing goals + +DDL Deparser provides the ability to encode the original DDL command to a JSON string, then decode it to a fully schema-qualified DDL command which is supposed to have the same effect as the original command. This testing module aims to achieve the following four testing for the DDL deparser: + +1. Test that the generated JSON blob is expected using SQL tests. +2. Test that the re-formed DDL command is expected using SQL tests. +3. Test that the re-formed DDL command has the same effect as the original command + by comparing the results of pg_dump, using the SQL tests in 1 and 2. +4. Test that new DDL syntax is handled by the DDL deparser by capturing and deparing + DDL commands ran by pg_regress. + +1 and 2 is tested with SQL tests, by noticing the deparsed JSON blob and the re-formed command. 
+ +Goal 3 is tested with TAP framework in t/001_compare_dumped_results.pl + +Goal 4 is tested with TAP framework and pg_regress in 002_regress_tests.pl + +## Usage + +Run `make check`, it will run the SQL tests first, then it will run the TAP tests. The execution of 002_regress_tests.pl is currently commented out because it will fail due to unimplemented commands in the DDL deparser. diff --git a/src/test/modules/test_ddl_deparse_regress/expected/aggregate.out b/src/test/modules/test_ddl_deparse_regress/expected/aggregate.out new file mode 100644 index 0000000000..1e19a7cee1 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/expected/aggregate.out @@ -0,0 +1,8 @@ +CREATE AGGREGATE newavg(int4) ( + sfunc = int4_avg_accum, stype = _int8, + finalfunc = int8_avg, + initcond1 = '{0,0}' +); +NOTICE: deparsed json: {"fmt": "CREATE AGGREGATE %{identity}D (%{types}s) (%{elems:, }s)", "elems": [{"fmt": "SFUNC=%{procedure}D", "procedure": {"objname": "int4_avg_accum", "schemaname": "pg_catalog"}}, {"fmt": "STYPE=%{type}T", "type": {"typmod": "", "typarray": true, "typename": "int8", "schemaname": "pg_catalog"}}, {"fmt": "SSPACE=", "present": false}, {"fmt": "FINALFUNC= %{procedure}D", "procedure": {"objname": "int8_avg", "schemaname": "pg_catalog"}}, {"fmt": "FINALFUNC_EXTRA=", "present": false}, {"fmt": "INITCOND= %{initval}L", "initval": "{0,0}"}, {"fmt": "MSFUNC=", "present": false}, {"fmt": "MSTYPE=", "present": false}, {"fmt": "MSSPACE=", "present": false}, {"fmt": "MINVFUNC=", "present": false}, {"fmt": "MFINALFUNC=", "present": false}, {"fmt": "MFINALFUNC_EXTRA=", "present": false}, {"fmt": "MINITCOND=", "present": false}, {"fmt": "HYPOTHETICAL=", "present": false}, {"fmt": "SORTOP=", "present": false}], "types": {"fmt": "%{direct:, }s", "direct": [{"fmt": "%{mode}s %{name}s %{type}T", "mode": "", "name": "", "type": {"typmod": "", "typarray": false, "typename": "int4", "schemaname": "pg_catalog"}}]}, "identity": {"objname": "newavg", "schemaname": 
"public"}} +NOTICE: re-formed command: CREATE AGGREGATE public.newavg ( pg_catalog.int4) (SFUNC=pg_catalog.int4_avg_accum, STYPE=pg_catalog.int8[], FINALFUNC= pg_catalog.int8_avg, INITCOND= '{0,0}') +DROP AGGREGATE newavg(int4); diff --git a/src/test/modules/test_ddl_deparse_regress/expected/create_extension.out b/src/test/modules/test_ddl_deparse_regress/expected/create_extension.out new file mode 100644 index 0000000000..06ccc62ba9 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/expected/create_extension.out @@ -0,0 +1,6 @@ +--- +--- CREATE_EXTENSION +--- +CREATE EXTENSION pg_stat_statements; +NOTICE: deparsed json: {"fmt": "CREATE EXTENSION %{if_not_exists}s %{name}I %{options: }s", "name": "pg_stat_statements", "options": [{"fmt": "SCHEMA %{schema}I", "type": "schema", "schema": "public"}], "if_not_exists": ""} +NOTICE: re-formed command: CREATE EXTENSION pg_stat_statements SCHEMA public diff --git a/src/test/modules/test_ddl_deparse_regress/expected/create_schema.out b/src/test/modules/test_ddl_deparse_regress/expected/create_schema.out new file mode 100644 index 0000000000..1e09da5170 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/expected/create_schema.out @@ -0,0 +1,22 @@ +-- +-- CREATE_SCHEMA +-- +CREATE SCHEMA foo; +NOTICE: deparsed json: {"fmt": "CREATE SCHEMA %{if_not_exists}s %{name}I %{authorization}s", "name": "foo", "authorization": {"fmt": "AUTHORIZATION %{authorization_role}I", "present": false, "authorization_role": null}, "if_not_exists": ""} +NOTICE: re-formed command: CREATE SCHEMA foo +CREATE SCHEMA IF NOT EXISTS bar; +NOTICE: deparsed json: {"fmt": "CREATE SCHEMA %{if_not_exists}s %{name}I %{authorization}s", "name": "bar", "authorization": {"fmt": "AUTHORIZATION %{authorization_role}I", "present": false, "authorization_role": null}, "if_not_exists": "IF NOT EXISTS"} +NOTICE: re-formed command: CREATE SCHEMA IF NOT EXISTS bar +CREATE SCHEMA baz; +NOTICE: deparsed json: {"fmt": "CREATE SCHEMA %{if_not_exists}s 
%{name}I %{authorization}s", "name": "baz", "authorization": {"fmt": "AUTHORIZATION %{authorization_role}I", "present": false, "authorization_role": null}, "if_not_exists": ""} +NOTICE: re-formed command: CREATE SCHEMA baz +-- Will not be created, and will not be handled by the +-- event trigger +CREATE SCHEMA IF NOT EXISTS baz; +NOTICE: schema "baz" already exists, skipping +CREATE SCHEMA element_test + CREATE TABLE foo (id int) +NOTICE: deparsed json: {"fmt": "CREATE SCHEMA %{if_not_exists}s %{name}I %{authorization}s", "name": "element_test", "authorization": {"fmt": "AUTHORIZATION %{authorization_role}I", "present": false, "authorization_role": null}, "if_not_exists": ""} +NOTICE: re-formed command: CREATE SCHEMA element_test +NOTICE: deparsed json: {"fmt": "CREATE %{persistence}s TABLE %{if_not_exists}s %{identity}D (%{table_elements:, }s) %{inherits}s %{tablespace}s %{on_commit}s %{partition_by}s %{access_method}s %{with_clause}s", "identity": {"objname": "foo", "schemaname": "element_test"}, "inherits": {"fmt": "INHERITS (%{parents:, }D)", "parents": null, "present": false}, "on_commit": {"fmt": "ON COMMIT %{on_commit_value}s", "present": false, "on_commit_value": null}, "tablespace": {"fmt": "TABLESPACE %{tablespace}I", "present": false, "tablespace": null}, "persistence": "", "with_clause": {"fmt": "WITH", "present": false}, "partition_by": {"fmt": "PARTITION BY %{definition}s", "present": false, "definition": null}, "access_method": {"fmt": "USING %{access_method}I", "present": false, "access_method": null}, "if_not_exists": "", "table_elements": [{"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "id", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "int4", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE", "present": false}, "colstorage": "plain", "compression": 
{"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}]} +NOTICE: re-formed command: CREATE TABLE element_test.foo (id pg_catalog.int4 STORAGE plain ) diff --git a/src/test/modules/test_ddl_deparse_regress/expected/create_table.out b/src/test/modules/test_ddl_deparse_regress/expected/create_table.out new file mode 100644 index 0000000000..9eeaa1ccc1 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/expected/create_table.out @@ -0,0 +1,36 @@ +CREATE TABLE simple_table( + id int, + name varchar(5) +); +NOTICE: deparsed json: {"fmt": "CREATE %{persistence}s TABLE %{if_not_exists}s %{identity}D (%{table_elements:, }s) %{inherits}s %{tablespace}s %{on_commit}s %{partition_by}s %{access_method}s %{with_clause}s", "identity": {"objname": "simple_table", "schemaname": "public"}, "inherits": {"fmt": "INHERITS (%{parents:, }D)", "parents": null, "present": false}, "on_commit": {"fmt": "ON COMMIT %{on_commit_value}s", "present": false, "on_commit_value": null}, "tablespace": {"fmt": "TABLESPACE %{tablespace}I", "present": false, "tablespace": null}, "persistence": "", "with_clause": {"fmt": "WITH", "present": false}, "partition_by": {"fmt": "PARTITION BY %{definition}s", "present": false, "definition": null}, "access_method": {"fmt": "USING %{access_method}I", "present": false, "access_method": null}, "if_not_exists": "", "table_elements": [{"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "id", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "int4", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE", "present": false}, "colstorage": "plain", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": 
{"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "name", "type": "column", "coltype": {"typmod": "(5)", "typarray": false, "typename": "varchar", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE %{name}D", "name": {"objname": "default", "schemaname": "pg_catalog"}}, "colstorage": "extended", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}]} +NOTICE: re-formed command: CREATE TABLE public.simple_table (id pg_catalog.int4 STORAGE plain , name pg_catalog."varchar"(5) STORAGE extended COLLATE pg_catalog."default" ) +-- Test CREATE INDEX with an INCLUDE CLAUSE +CREATE UNIQUE INDEX covering_index on simple_table (id) INCLUDE (name); +NOTICE: deparsed json: {"fmt": "CREATE %{unique}s INDEX %{concurrently}s %{if_not_exists}s %{name}I ON %{table}D USING %{index_am}s %{definition}s %{with}s %{tablespace}s %{where_clause}s", "name": "covering_index", "with": {"fmt": "WITH", "present": false}, "table": {"objname": "simple_table", "schemaname": "public"}, "unique": "UNIQUE", "index_am": "btree", "definition": "(id pg_catalog.int4_ops) INCLUDE (name)", "tablespace": {"fmt": "TABLESPACE", "present": false}, "concurrently": "", "where_clause": {"fmt": "WHERE", "present": false}, "if_not_exists": ""} +NOTICE: re-formed command: CREATE UNIQUE INDEX covering_index ON public.simple_table USING btree (id pg_catalog.int4_ops) INCLUDE (name) +-- Test TableLikeClause is handled properly +CREATE TABLE ctlt1 (a text CHECK (length(a) > 2) PRIMARY KEY, b text); +NOTICE: deparsed json: {"fmt": "CREATE %{persistence}s TABLE %{if_not_exists}s %{identity}D (%{table_elements:, }s) %{inherits}s %{tablespace}s %{on_commit}s %{partition_by}s %{access_method}s 
%{with_clause}s", "identity": {"objname": "ctlt1", "schemaname": "public"}, "inherits": {"fmt": "INHERITS (%{parents:, }D)", "parents": null, "present": false}, "on_commit": {"fmt": "ON COMMIT %{on_commit_value}s", "present": false, "on_commit_value": null}, "tablespace": {"fmt": "TABLESPACE %{tablespace}I", "present": false, "tablespace": null}, "persistence": "", "with_clause": {"fmt": "WITH", "present": false}, "partition_by": {"fmt": "PARTITION BY %{definition}s", "present": false, "definition": null}, "access_method": {"fmt": "USING %{access_method}I", "present": false, "access_method": null}, "if_not_exists": "", "table_elements": [{"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "a", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "text", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE %{name}D", "name": {"objname": "default", "schemaname": "pg_catalog"}}, "colstorage": "extended", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "b", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "text", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE %{name}D", "name": {"objname": "default", "schemaname": "pg_catalog"}}, "colstorage": "extended", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "CONSTRAINT %{name}I %{definition}s", "name": "ctlt1_a_check", "type": "constraint", 
"contype": "check", "definition": "CHECK ((pg_catalog.length(a) OPERATOR(pg_catalog.>) 2))"}, {"fmt": "CONSTRAINT %{name}I %{definition}s", "name": "ctlt1_pkey", "type": "constraint", "contype": "primary key", "definition": "PRIMARY KEY (a)"}]} +NOTICE: re-formed command: CREATE TABLE public.ctlt1 (a pg_catalog.text STORAGE extended COLLATE pg_catalog."default" , b pg_catalog.text STORAGE extended COLLATE pg_catalog."default" , CONSTRAINT ctlt1_a_check CHECK ((pg_catalog.length(a) OPERATOR(pg_catalog.>) 2)), CONSTRAINT ctlt1_pkey PRIMARY KEY (a)) +NOTICE: deparsed json: +NOTICE: re-formed command: +ALTER TABLE ctlt1 ALTER COLUMN a SET STORAGE MAIN; +NOTICE: deparsed json: {"fmt": "ALTER %{objtype}s %{identity}D %{subcmds:, }s", "objtype": "TABLE", "subcmds": [{"fmt": "ALTER COLUMN %{column}I SET STORAGE %{storage}s", "type": "set storage", "column": "a", "storage": "main"}], "identity": {"objname": "ctlt1", "schemaname": "public"}} +NOTICE: re-formed command: ALTER TABLE public.ctlt1 ALTER COLUMN a SET STORAGE main +ALTER TABLE ctlt1 ALTER COLUMN b SET STORAGE EXTERNAL; +NOTICE: deparsed json: {"fmt": "ALTER %{objtype}s %{identity}D %{subcmds:, }s", "objtype": "TABLE", "subcmds": [{"fmt": "ALTER COLUMN %{column}I SET STORAGE %{storage}s", "type": "set storage", "column": "b", "storage": "external"}], "identity": {"objname": "ctlt1", "schemaname": "public"}} +NOTICE: re-formed command: ALTER TABLE public.ctlt1 ALTER COLUMN b SET STORAGE external +-- Test foreign key constraint is handled in a following ALTER TABLE ADD CONSTRAINT FOREIGN KEY REFERENCES subcommand +CREATE TABLE product (id int PRIMARY KEY, name text); +NOTICE: deparsed json: {"fmt": "CREATE %{persistence}s TABLE %{if_not_exists}s %{identity}D (%{table_elements:, }s) %{inherits}s %{tablespace}s %{on_commit}s %{partition_by}s %{access_method}s %{with_clause}s", "identity": {"objname": "product", "schemaname": "public"}, "inherits": {"fmt": "INHERITS (%{parents:, }D)", "parents": null, "present": false}, 
"on_commit": {"fmt": "ON COMMIT %{on_commit_value}s", "present": false, "on_commit_value": null}, "tablespace": {"fmt": "TABLESPACE %{tablespace}I", "present": false, "tablespace": null}, "persistence": "", "with_clause": {"fmt": "WITH", "present": false}, "partition_by": {"fmt": "PARTITION BY %{definition}s", "present": false, "definition": null}, "access_method": {"fmt": "USING %{access_method}I", "present": false, "access_method": null}, "if_not_exists": "", "table_elements": [{"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "id", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "int4", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE", "present": false}, "colstorage": "plain", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "name", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "text", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE %{name}D", "name": {"objname": "default", "schemaname": "pg_catalog"}}, "colstorage": "extended", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "CONSTRAINT %{name}I %{definition}s", "name": "product_pkey", "type": "constraint", "contype": "primary key", "definition": "PRIMARY KEY (id)"}]} +NOTICE: re-formed command: CREATE TABLE public.product (id pg_catalog.int4 STORAGE plain , name pg_catalog.text STORAGE extended COLLATE 
pg_catalog."default" , CONSTRAINT product_pkey PRIMARY KEY (id)) +NOTICE: deparsed json: +NOTICE: re-formed command: +CREATE TABLE orders (order_id int PRIMARY KEY, product_id int +REFERENCES product (id)); +NOTICE: deparsed json: {"fmt": "CREATE %{persistence}s TABLE %{if_not_exists}s %{identity}D (%{table_elements:, }s) %{inherits}s %{tablespace}s %{on_commit}s %{partition_by}s %{access_method}s %{with_clause}s", "identity": {"objname": "orders", "schemaname": "public"}, "inherits": {"fmt": "INHERITS (%{parents:, }D)", "parents": null, "present": false}, "on_commit": {"fmt": "ON COMMIT %{on_commit_value}s", "present": false, "on_commit_value": null}, "tablespace": {"fmt": "TABLESPACE %{tablespace}I", "present": false, "tablespace": null}, "persistence": "", "with_clause": {"fmt": "WITH", "present": false}, "partition_by": {"fmt": "PARTITION BY %{definition}s", "present": false, "definition": null}, "access_method": {"fmt": "USING %{access_method}I", "present": false, "access_method": null}, "if_not_exists": "", "table_elements": [{"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "order_id", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "int4", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE", "present": false}, "colstorage": "plain", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "%{name}I %{coltype}T STORAGE %{colstorage}s %{compression}s %{collation}s %{not_null}s %{default}s %{generated_column}s", "name": "product_id", "type": "column", "coltype": {"typmod": "", "typarray": false, "typename": "int4", "schemaname": "pg_catalog"}, "default": {"fmt": "DEFAULT", "present": false}, "not_null": "", "collation": {"fmt": "COLLATE", 
"present": false}, "colstorage": "plain", "compression": {"fmt": "COMPRESSION %{compression_method}I", "present": false, "compression_method": null}, "generated_column": {"fmt": "GENERATED ALWAYS AS", "present": false}}, {"fmt": "CONSTRAINT %{name}I %{definition}s", "name": "orders_pkey", "type": "constraint", "contype": "primary key", "definition": "PRIMARY KEY (order_id)"}]} +NOTICE: re-formed command: CREATE TABLE public.orders (order_id pg_catalog.int4 STORAGE plain , product_id pg_catalog.int4 STORAGE plain , CONSTRAINT orders_pkey PRIMARY KEY (order_id)) +NOTICE: deparsed json: +NOTICE: re-formed command: +NOTICE: deparsed json: {"fmt": "ALTER %{objtype}s %{identity}D %{subcmds:, }s", "objtype": "TABLE", "subcmds": [{"fmt": "ADD CONSTRAINT %{name}I %{definition}s", "name": "orders_product_id_fkey", "type": "add constraint", "definition": "FOREIGN KEY (product_id) REFERENCES public.product(id)"}], "identity": {"objname": "orders", "schemaname": "public"}} +NOTICE: re-formed command: ALTER TABLE public.orders ADD CONSTRAINT orders_product_id_fkey FOREIGN KEY (product_id) REFERENCES public.product(id) diff --git a/src/test/modules/test_ddl_deparse_regress/expected/test_ddl_deparse.out b/src/test/modules/test_ddl_deparse_regress/expected/test_ddl_deparse.out new file mode 100644 index 0000000000..b0e2bf251a --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/expected/test_ddl_deparse.out @@ -0,0 +1,18 @@ +CREATE EXTENSION test_ddl_deparse_regress; +CREATE OR REPLACE FUNCTION test_ddl_deparse() + RETURNS event_trigger LANGUAGE plpgsql AS +$$ +DECLARE + r record; + deparsed_json text; +BEGIN + FOR r IN SELECT * FROM pg_event_trigger_ddl_commands() + LOOP + deparsed_json = pg_catalog.ddl_deparse_to_json(r.command); + RAISE NOTICE 'deparsed json: %', deparsed_json; + RAISE NOTICE 're-formed command: %', pg_catalog.ddl_deparse_expand_command(deparsed_json); + END LOOP; +END; +$$; +CREATE EVENT TRIGGER test_ddl_deparse +ON ddl_command_end EXECUTE PROCEDURE 
test_ddl_deparse(); diff --git a/src/test/modules/test_ddl_deparse_regress/meson.build b/src/test/modules/test_ddl_deparse_regress/meson.build new file mode 100644 index 0000000000..7a152a23e2 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/meson.build @@ -0,0 +1,42 @@ +# FIXME: prevent install during main install, but not during test :/ + +test_ddl_deparse_regress_sources = files( + 'test_ddl_deparse_regress.c', +) + +if host_system == 'windows' + test_ddl_deparse_regress_sources += rc_lib_gen.process(win32ver_rc, extra_args: [ + '--NAME', 'test_ddl_deparse_regress', + '--FILEDESC', 'test_ddl_deparse_regress - regression testing for DDL deparsing',]) +endif + +test_ddl_deparse_regress = shared_module('test_ddl_deparse_regress', + test_ddl_deparse_regress_sources, + kwargs: pg_mod_args, +) +testprep_targets += test_ddl_deparse_regress + +install_data( + 'test_ddl_deparse_regress.control', + 'test_ddl_deparse_regress--1.0.sql', + kwargs: contrib_data_args, +) + +tests += { + 'name': 'test_ddl_deparse_regress', + 'sd': meson.current_source_dir(), + 'bd': meson.current_build_dir(), + 'regress': { + 'sql': [ + 'test_ddl_deparse', + 'create_extension', + 'create_schema', + 'create_table', + ], + }, + 'tap': { + 'tests': [ + 't/001_compare_dumped_results.pl', + ], + }, +} diff --git a/src/test/modules/test_ddl_deparse_regress/sql/aggregate.sql b/src/test/modules/test_ddl_deparse_regress/sql/aggregate.sql new file mode 100644 index 0000000000..561b4e120a --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/sql/aggregate.sql @@ -0,0 +1,7 @@ +CREATE AGGREGATE newavg(int4) ( + sfunc = int4_avg_accum, stype = _int8, + finalfunc = int8_avg, + initcond1 = '{0,0}' +); + +DROP AGGREGATE newavg(int4); diff --git a/src/test/modules/test_ddl_deparse_regress/sql/create_extension.sql b/src/test/modules/test_ddl_deparse_regress/sql/create_extension.sql new file mode 100644 index 0000000000..d23e7fd9ce --- /dev/null +++ 
b/src/test/modules/test_ddl_deparse_regress/sql/create_extension.sql @@ -0,0 +1,5 @@ +--- +--- CREATE_EXTENSION +--- + +CREATE EXTENSION pg_stat_statements; diff --git a/src/test/modules/test_ddl_deparse_regress/sql/create_schema.sql b/src/test/modules/test_ddl_deparse_regress/sql/create_schema.sql new file mode 100644 index 0000000000..10b13f0a55 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/sql/create_schema.sql @@ -0,0 +1,16 @@ +-- +-- CREATE_SCHEMA +-- + +CREATE SCHEMA foo; + +CREATE SCHEMA IF NOT EXISTS bar; + +CREATE SCHEMA baz; + +-- Will not be created, and will not be handled by the +-- event trigger +CREATE SCHEMA IF NOT EXISTS baz; + +CREATE SCHEMA element_test + CREATE TABLE foo (id int) diff --git a/src/test/modules/test_ddl_deparse_regress/sql/create_table.sql b/src/test/modules/test_ddl_deparse_regress/sql/create_table.sql new file mode 100644 index 0000000000..997d2f54fb --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/sql/create_table.sql @@ -0,0 +1,16 @@ +CREATE TABLE simple_table( + id int, + name varchar(5) +); +-- Test CREATE INDEX with an INCLUDE CLAUSE +CREATE UNIQUE INDEX covering_index on simple_table (id) INCLUDE (name); + +-- Test TableLikeClause is handled properly +CREATE TABLE ctlt1 (a text CHECK (length(a) > 2) PRIMARY KEY, b text); +ALTER TABLE ctlt1 ALTER COLUMN a SET STORAGE MAIN; +ALTER TABLE ctlt1 ALTER COLUMN b SET STORAGE EXTERNAL; + +-- Test foreign key constraint is handled in a following ALTER TABLE ADD CONSTRAINT FOREIGN KEY REFERENCES subcommand +CREATE TABLE product (id int PRIMARY KEY, name text); +CREATE TABLE orders (order_id int PRIMARY KEY, product_id int +REFERENCES product (id)); diff --git a/src/test/modules/test_ddl_deparse_regress/sql/test_ddl_deparse.sql b/src/test/modules/test_ddl_deparse_regress/sql/test_ddl_deparse.sql new file mode 100644 index 0000000000..0889fa02f9 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/sql/test_ddl_deparse.sql @@ -0,0 +1,20 @@ +CREATE 
EXTENSION test_ddl_deparse_regress; + +CREATE OR REPLACE FUNCTION test_ddl_deparse() + RETURNS event_trigger LANGUAGE plpgsql AS +$$ +DECLARE + r record; + deparsed_json text; +BEGIN + FOR r IN SELECT * FROM pg_event_trigger_ddl_commands() + LOOP + deparsed_json = pg_catalog.ddl_deparse_to_json(r.command); + RAISE NOTICE 'deparsed json: %', deparsed_json; + RAISE NOTICE 're-formed command: %', pg_catalog.ddl_deparse_expand_command(deparsed_json); + END LOOP; +END; +$$; + +CREATE EVENT TRIGGER test_ddl_deparse +ON ddl_command_end EXECUTE PROCEDURE test_ddl_deparse(); diff --git a/src/test/modules/test_ddl_deparse_regress/t/001_compare_dumped_results.pl b/src/test/modules/test_ddl_deparse_regress/t/001_compare_dumped_results.pl new file mode 100644 index 0000000000..664ad3498f --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/t/001_compare_dumped_results.pl @@ -0,0 +1,181 @@ +use strict; +use warnings; +use Env; +use PostgreSQL::Test::Cluster; +use PostgreSQL::Test::Utils; +use Test::More; +use File::Basename; + +sub execute_test_case { + my $test_name = $_[0]; + my $pub_node = $_[1]; + my $sub_node = $_[2]; + my $dbname = $_[3]; + my $outputdir = $PostgreSQL::Test::Utils::tmp_check; + + # set up deparse testing resources + create_deparse_testing_resources_on_pub_node($pub_node, $dbname); + + my $test_file = "./sql/${test_name}.sql"; + my $content = do{local(@ARGV,$/)=$test_file;<>}; + + $pub_node -> psql($dbname, $content); + + # retrieve SQL commands generated from deparsed DDLs on pub node + my $ddl_sql = ''; + $pub_node -> psql($dbname,q( + select ddl_deparse_expand_command(ddl) || ';' from deparsed_ddls ORDER BY id ASC), + stdout => \$ddl_sql); + + print "\nstart printing re-formed sql\n"; + print $ddl_sql; + print "\nend printing re-formed sql\n"; + # execute SQL commands on sub node + $sub_node -> psql($dbname, $ddl_sql); + + # clean up deparse testing resources + clean_deparse_testing_resources_on_pub_node($pub_node, $dbname); + + # dump from pub 
node and sub node + mkdir ${outputdir}."/dumps", 0755; + my $pub_dump = ${outputdir}."/dumps/${test_name}_pub.dump"; + my $sub_dump = ${outputdir}."/dumps/${test_name}_sub.dump"; + system("pg_dumpall " + . "-s " + . "-f " + . $pub_dump . " " + . "--no-sync " + . '-p ' + . $pub_node->port) == 0 or die "Dump pub node failed in ${test_name}"; + system("pg_dumpall " + . "-s " + . "-f " + . $sub_dump . " " + . "--no-sync " + . '-p ' + . $sub_node->port) == 0 or die "Dump sub node failed in ${test_name}"; + + # compare dumped results + is(system("diff " + . $pub_dump . " " + . $sub_dump), 0, "Dumped results diff in ${test_name}"); +} + +sub init_node { + my $node_name = $_[0]; + my $node = PostgreSQL::Test::Cluster->new($node_name); + $node->init; + # increase some settings that Cluster->new makes too low by default. + $node->adjust_conf('postgresql.conf', 'max_connections', '25'); + $node->append_conf('postgresql.conf', + 'max_prepared_transactions = 10'); + return $node; +} + +sub init_pub_node { + my $node_name = $_[0]."_pub"; + return init_node($node_name) +} + +sub init_sub_node { + my $node_name = $_[0]."_sub"; + return init_node($node_name) +} + +sub create_deparse_testing_resources_on_pub_node { + my $node = $_[0]; + my $dbname = $_[1]; + $node -> psql($dbname, q( + begin; + CREATE EXTENSION test_ddl_deparse_regress; + create table deparsed_ddls(id SERIAL PRIMARY KEY, tag text, object_identity text, ddl text); + + create or replace function deparse_to_json() + returns event_trigger language plpgsql as + $$ + declare + r record; + begin + for r in select * from pg_event_trigger_ddl_commands() + loop + insert into deparsed_ddls(tag, object_identity, ddl) values (r.command_tag, r.object_identity, pg_catalog.ddl_deparse_to_json(r.command)); + end loop; + END; + $$; + + create or replace function deparse_drops_to_json() + returns event_trigger language plpgsql as + $$ + declare + r record; + begin + for r in select * from pg_event_trigger_dropped_objects() + loop + 
insert into deparsed_ddls(tag, object_identity, ddl) values (r.object_type, r.object_identity, public.deparse_drop_ddl(r.object_identity, r.object_type)); + end loop; + END; + $$; + + create event trigger ddl_deparse_trig + on ddl_command_end execute procedure deparse_to_json(); + + create event trigger ddl_drops_deparse_trig + on sql_drop execute procedure deparse_drops_to_json(); + + commit; + )); +} + +sub clean_deparse_testing_resources_on_pub_node { + my $node = $_[0]; + my $dbname = $_[1]; + # Drop the event trigger and the function before taking a logical dump. + $node -> safe_psql($dbname,q( + drop event trigger ddl_deparse_trig; + drop event trigger ddl_drops_deparse_trig; + drop function deparse_to_json(); + drop function deparse_drops_to_json(); + drop table deparsed_ddls; + DROP EXTENSION test_ddl_deparse_regress; + )); +} + +sub trim { + my @out = @_; + for (@out) { + s/^\s+//; + s/\s+$//; + } + return wantarray ? @out : $out[0]; +} + +# Create and start pub sub nodes +my $pub_node = init_pub_node("test"); +my $sub_node = init_sub_node("test"); +my $dbname = "postgres"; +$pub_node -> start; +$sub_node -> start; + +# load test cases from the regression tests +my @regress_tests = split /\s+/, $ENV{REGRESS}; + +foreach(@regress_tests) { + my $test_name = trim($_); + # skip if it's regression test preparation or empty string + if ($test_name eq "" or $test_name eq "test_ddl_deparse") + { + next; + } + eval {execute_test_case($test_name, $pub_node, $sub_node, $dbname);}; + if ($@ ne "") + { + fail($@); + } +} +close; + +# Close nodes +$pub_node->stop; +$sub_node->stop; + +done_testing(); diff --git a/src/test/modules/test_ddl_deparse_regress/t/002_regress_tests.pl b/src/test/modules/test_ddl_deparse_regress/t/002_regress_tests.pl new file mode 100644 index 0000000000..3a875c33fb --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/t/002_regress_tests.pl @@ -0,0 +1,207 @@ +use strict; +use warnings; +use Env; +use PostgreSQL::Test::Cluster; +use 
PostgreSQL::Test::Utils; +use Test::More; +use File::Basename; + +sub execute_regress_test { + my $pub_node = $_[0]; + my $sub_node = $_[1]; + my $dbname = "postgres"; + my $dlpath = dirname($ENV{REGRESS_SHLIB}); + my $inputdir = "../../regress"; + my $outputdir = $PostgreSQL::Test::Utils::tmp_check; + + # set up deparse testing resources + create_deparse_testing_resources_on_pub_node($pub_node, $dbname); + + # execute core regression tests on pub node + my $regress_cmd = $ENV{PG_REGRESS} + . " " + . "--dlpath=\"$dlpath\" " + . "--max-concurrent-tests=20 " + . "--dbname=" + . $dbname . " " + . "--use-existing " + . "--host=" + . $pub_node->host . " " + . "--port=" + . $pub_node->port . " " + . "--inputdir=$inputdir " + . "--outputdir=\"$outputdir\" " + . "--schedule=$inputdir/parallel_schedule"; + print "Regression test command is $regress_cmd"; + my $rc = system($regress_cmd); + if ($rc != 0) + { + # If regression test fails, dump out the regression diffs file + my $diffs = "${outputdir}/regression/regression.diffs"; + if (-e $diffs) + { + print "=== dumping $diffs ===\n"; + print slurp_file($diffs); + print "=== EOF ===\n"; + } + die "Regression test failed"; + } + + # Retrieve SQL commands generated from deparsed DDLs on pub node + my $ddl_sql = ''; + $pub_node -> psql($dbname,q( + select ddl_deparse_expand_command(ddl) || ';' from deparsed_ddls ORDER BY id ASC), + stdout => \$ddl_sql); + + # Execute SQL commands on sub node + $sub_node -> psql($dbname, $ddl_sql); + + # Clean up deparse testing resources + clean_deparse_testing_resources_on_pub_node($pub_node, $dbname); + + # Dump from pub node and sub node + mkdir ${outputdir}."/dumps", 0755; + my $pub_dump = ${outputdir}."/dumps/regress_pub.dump"; + my $sub_dump = ${outputdir}."/dumps/regress_sub.dump"; + system("pg_dumpall " + . "-s " + . "-f " + . $pub_dump . " " + . "--no-sync " + . '-p ' + . $pub_node->port) == 0 or die "Dump pub node failed"; + system("pg_dumpall " + . "-s " + . "-f " + . $sub_dump . 
" " + . "--no-sync " + . '-p ' + . $sub_node->port) == 0 or die "Dump sub node failed"; + + # Compare dumped results + is(system("diff " + . $pub_dump . " " + . $sub_dump), 0, "Comparing dumped output"); + + # Close nodes + $pub_node->stop; + $sub_node->stop; +} + +sub init_node { + my $node_name = $_[0]; + my $node = PostgreSQL::Test::Cluster->new($node_name); + $node->init; + # increase some settings that Cluster->new makes too low by default. + $node->adjust_conf('postgresql.conf', 'max_connections', '25'); + $node->append_conf('postgresql.conf', + 'max_prepared_transactions = 10'); + return $node; +} + +sub init_pub_node { + my $node_name = $_[0]."_pub"; + return init_node($node_name) +} + +sub init_sub_node { + my $node_name = $_[0]."_sub"; + return init_node($node_name) +} + +sub create_deparse_testing_resources_on_pub_node { + my $node = $_[0]; + my $dbname = $_[1]; + $node -> psql($dbname, q( + begin; + CREATE EXTENSION test_ddl_deparse_regress; + create table deparsed_ddls(id SERIAL PRIMARY KEY, tag text, object_identity text, ddl text); + create or replace function deparse_to_json() + returns event_trigger language plpgsql as + $$ + declare + r record; + begin + for r in select * from pg_event_trigger_ddl_commands() + loop + insert into deparsed_ddls(tag, object_identity, ddl) values (r.command_tag, r.object_identity, pg_catalog.ddl_deparse_to_json(r.command)); + end loop; + END; + $$; + create or replace function deparse_drops_to_json() + returns event_trigger language plpgsql as + $$ + declare + r record; + begin + for r in select * from pg_event_trigger_dropped_objects() + loop + insert into deparsed_ddls(tag, object_identity, ddl) values (r.object_type, r.object_identity, public.deparse_drop_ddl(r.object_identity, r.object_type)); + end loop; + END; + $$; + create event trigger ddl_deparse_trig + on ddl_command_end execute procedure deparse_to_json(); + create event trigger ddl_drops_deparse_trig + on sql_drop execute procedure 
deparse_drops_to_json(); + commit; + )); +} + +sub clean_deparse_testing_resources_on_pub_node { + my $node = $_[0]; + my $dbname = $_[1]; + # Drop the event trigger and the function before taking a logical dump. + $node -> safe_psql($dbname,q( + drop event trigger ddl_deparse_trig; + drop event trigger ddl_drops_deparse_trig; + drop function deparse_to_json(); + drop function deparse_drops_to_json(); + drop table deparsed_ddls; + DROP EXTENSION test_ddl_deparse_regress; + )); +} + +sub trim { + my @out = @_; + for (@out) { + s/^\s+//; + s/\s+$//; + } + return wantarray ? @out : $out[0]; +} + +# Create and start pub sub nodes +my $pub_node = init_pub_node("regress"); +my $sub_node = init_sub_node("regress"); +$pub_node -> start; +$sub_node -> start; + +# Comment the execution temporarily, an error in ddl_deparse.c will cause the database exits abnormally, error signature: +# +# 2022-12-03 23:02:44.778 UTC [129102] pg_regress/tablespace LOG: statement: ALTER TABLE ALL IN TABLESPACE regress_tblspace_renamed SET TABLESPACE pg_default; +# TRAP: failed Assert("IsA(stmt, AlterTableStmt)"), File: "ddl_deparse.c", Line: 3371, PID: 129102 +# postgres: regress_pub: runqi postgres [local] ALTER TABLE(ExceptionalCondition+0xbb)[0x563bd6ea5b0d] +# ... 
+# /lib/x86_64-linux-gnu/libc.so.6(+0x29d90)[0x7fd6a74a6d90] +# /lib/x86_64-linux-gnu/libc.so.6(__libc_start_main+0x80)[0x7fd6a74a6e40] +# postgres: regress_pub: runqi postgres [local] ALTER TABLE(_start+0x25)[0x563bd672f1e5] +# 2022-12-03 23:02:44.850 UTC [129058] LOG: server process (PID 129102) was terminated by signal 6: Aborted +# 2022-12-03 23:02:44.850 UTC [129058] DETAIL: Failed process was running: ALTER TABLE ALL IN TABLESPACE regress_tblspace_renamed SET TABLESPACE pg_default; +# 2022-12-03 23:02:44.850 UTC [129058] LOG: terminating any other active server processes +# 2022-12-03 23:02:44.851 UTC [129058] LOG: shutting down because restart_after_crash is off +# 2022-12-03 23:02:44.852 UTC [129058] LOG: database system is shut down + +# eval {execute_regress_test($pub_node, $sub_node);}; +# if ($@ ne "") +# { +# fail($@); +# } + +# Close nodes +$pub_node->stop; +$sub_node->stop; + +pass("regression test passed"); + +done_testing(); \ No newline at end of file diff --git a/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress--1.0.sql b/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress--1.0.sql new file mode 100644 index 0000000000..14070cd51f --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress--1.0.sql @@ -0,0 +1,9 @@ +/* src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress--1.0.sql */ + +-- complain if script is sourced in psql, rather than via CREATE EXTENSION +\echo Use "CREATE EXTENSION test_ddl_deparse_regress" to load this file.
\quit + +CREATE FUNCTION deparse_drop_ddl(IN objidentity text, + IN objecttype text) + RETURNS text IMMUTABLE STRICT + AS 'MODULE_PATHNAME' LANGUAGE C; \ No newline at end of file diff --git a/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.c b/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.c new file mode 100644 index 0000000000..bd6992bdfd --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.c @@ -0,0 +1,59 @@ +/*---------------------------------------------------------------------- + * test_ddl_deparse_regress.c + * Support functions for the test_ddl_deparse_regress module + * + * Copyright (c) 2014-2022, PostgreSQL Global Development Group + * + * IDENTIFICATION + * src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.c + *---------------------------------------------------------------------- + */ +#include "postgres.h" + +#include "catalog/pg_type.h" +#include "funcapi.h" +#include "nodes/execnodes.h" +#include "tcop/deparse_utility.h" +#include "tcop/utility.h" +#include "utils/builtins.h" +#include "tcop/ddl_deparse.h" + +PG_MODULE_MAGIC; + +PG_FUNCTION_INFO_V1(deparse_drop_ddl); + +/* + * Given object_identity and object_type of dropped object, return a JSON representation of DROP command. 
+ */ +Datum +deparse_drop_ddl(PG_FUNCTION_ARGS) +{ + text *objidentity = PG_GETARG_TEXT_P(0); + const char *objidentity_str = text_to_cstring(objidentity); + text *objecttype = PG_GETARG_TEXT_P(1); + const char *objecttype_str = text_to_cstring(objecttype); + + char *command; + + // constraint is part of alter table command, no need to drop in DROP command + if (strcmp(objecttype_str, "table constraint") == 0) { + PG_RETURN_NULL(); + } else if (strcmp(objecttype_str, "toast table") == 0) { + objecttype_str = "table"; + } else if (strcmp(objecttype_str, "default value") == 0) { + PG_RETURN_NULL(); + } else if (strcmp(objecttype_str, "operator of access method") == 0) { + PG_RETURN_NULL(); + } else if (strcmp(objecttype_str, "function of access method") == 0) { + PG_RETURN_NULL(); + } else if (strcmp(objecttype_str, "table column") == 0) { + PG_RETURN_NULL(); + } + + command = deparse_drop_command(objidentity_str, objecttype_str, DROP_CASCADE); + + if (command) + PG_RETURN_TEXT_P(cstring_to_text(command)); + + PG_RETURN_NULL(); +} \ No newline at end of file diff --git a/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.control b/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.control new file mode 100644 index 0000000000..a1f934e658 --- /dev/null +++ b/src/test/modules/test_ddl_deparse_regress/test_ddl_deparse_regress.control @@ -0,0 +1,4 @@ +comment = 'Test code for DDL deparse regress feature' +default_version = '1.0' +module_pathname = '$libdir/test_ddl_deparse_regress' +relocatable = true -- 2.39.1.windows.1