Skip to content
6 changes: 3 additions & 3 deletions src/Makefile.global.in
Original file line number Diff line number Diff line change
Expand Up @@ -510,7 +510,7 @@ cd $(srcdir) && \
TESTDIR='$(CURDIR)' PATH="$(bindir):$(CURDIR):$$PATH" \
PGPORT='6$(DEF_PGPORT)' top_builddir='$(CURDIR)/$(top_builddir)' \
PG_REGRESS='$(CURDIR)/$(top_builddir)/src/oracle_test/regress/pg_regress' \
$(PROVE) $(ORACLE_PROVE_FLAGS) $(PROVE_FLAGS) $(if $(PROVE_TESTS),$(PROVE_TESTS),t/*.pl)
$(PROVE) $(ORACLE_PROVE_FLAGS) $(PROVE_FLAGS) $(if $(PROVE_TESTS),$(PROVE_TESTS),t/*.pl $(wildcard t/oracle/*.pl))
endef
#IvorySQL:END - SQL oracle_test
else # PGXS case
Expand All @@ -537,7 +537,7 @@ cd $(srcdir) && \
PATH="$(bindir):$(CURDIR):$$PATH" \
PGPORT='6$(DEF_PGPORT)' \
PG_REGRESS='$(top_builddir)/src/oracle_test/regress/pg_regress' \
$(PROVE) $(ORACLE_PROVE_FLAGS) $(PROVE_FLAGS) $(if $(PROVE_TESTS),$(PROVE_TESTS),t/*.pl)
$(PROVE) $(ORACLE_PROVE_FLAGS) $(PROVE_FLAGS) $(if $(PROVE_TESTS),$(PROVE_TESTS),t/*.pl $(wildcard t/oracle/*.pl))
endef
#IvorySQL:END - SQL oracle_test
endif # PGXS
Expand Down Expand Up @@ -566,7 +566,7 @@ cd $(srcdir) && \
$(with_temp_install) \
PGPORT='6$(DEF_PGPORT)' top_builddir='$(CURDIR)/$(top_builddir)' \
PG_REGRESS='$(CURDIR)/$(top_builddir)/src/oracle_test/regress/pg_regress' \
$(PROVE) $(ORACLE_PROVE_FLAGS) $(PROVE_FLAGS) $(if $(PROVE_TESTS),$(PROVE_TESTS),t/*.pl)
$(PROVE) $(ORACLE_PROVE_FLAGS) $(PROVE_FLAGS) $(if $(PROVE_TESTS),$(PROVE_TESTS),t/*.pl $(wildcard t/oracle/*.pl))
endef
#IvorySQL:END - SQL oracle_test

Expand Down
2 changes: 1 addition & 1 deletion src/bin/pg_dump/pg_dump.c
Original file line number Diff line number Diff line change
Expand Up @@ -6920,7 +6920,7 @@ getPackages(Archive *fout, int *numPkgs)
i_pkgname = PQfnumber(res, "pkgname");
i_oid = PQfnumber(res, "oid");
i_pkgnamespace = PQfnumber(res, "pkgnamespace");
i_rolname = PQfnumber(res, "rolname");
i_rolname = PQfnumber(res, "pkgowner");
i_pkgacl = PQfnumber(res, "pkgacl");
i_acldefault = PQfnumber(res, "acldefault");

Expand Down
350 changes: 350 additions & 0 deletions src/bin/pg_dump/t/oracle/001_pg_dump_ora_specific.pl
Original file line number Diff line number Diff line change
@@ -0,0 +1,350 @@

# Copyright (c) 2021-2024, PostgreSQL Global Development Group
# Portions Copyright (c) 2023-2025, IvorySQL Global Development Team

# Review-tool residue ("Comment thread" markers) removed — it was not valid
# Perl and made the file fail to compile.
use strict;
use warnings FATAL => 'all';

use PostgreSQL::Test::Cluster;
use PostgreSQL::Test::Utils;
use Test::More;

# Working directory for the dump output files produced by the runs below;
# cleaned up automatically when the test exits.
my $tempdir = PostgreSQL::Test::Utils::tempdir;

###############################################################
# Definition of the pg_dump runs to make.
#
# Each of these runs are named and those names are used below
# to define how each test should (or shouldn't) treat a result
# from a given run.
#
# test_key indicates that a given run should simply use the same
# set of like/unlike tests as another run, and which run that is.
#
# compile_option indicates if the commands run depend on a compilation
# option, if any. This can be used to control if tests should be
# skipped when a build dependency is not satisfied.
#
# dump_cmd is the pg_dump command to run, which is an array of
# the full command and arguments to run. Note that this is run
# using $node->command_ok(), so the port does not need to be
# specified and is pulled from $PGPORT, which is set by the
# PostgreSQL::Test::Cluster system.
#
# compress_cmd is the utility command for (de)compression, if any.
# Note that this should generally be used on pg_dump's output
# either to generate a text file to run through the tests, or
# to test pg_restore's ability to parse manually compressed files
# that otherwise pg_dump does not compress on its own (e.g. *.toc).
#
# glob_patterns is an optional array consisting of strings compilable
# with glob() to check the files generated after a dump.
#
# command_like is an optional utility that can be used to compare
# the output generated by a command on the contents of an existing
# dump, like the TOC description of a pg_restore command.
#
# restore_cmd is the pg_restore command to run, if any. Note
# that this should generally be used when the pg_dump goes to
# a non-text file and that the restore can then be used to
# generate a text file to run through the tests from the
# non-text file generated by pg_dump.
#
# TODO: Have pg_restore actually restore to an independent
# database and then pg_dump *that* database (or something along
# those lines) to validate that part of the process.

# The pg_dump invocations to exercise. Each run writes a plain-text dump
# of 'testdb' to "$tempdir/<run>.sql", which the tests below then scan.
my %pgdump_runs = (

	# Schema-only plain dump with ownership suppressed; the package DDL
	# must still appear.
	packages => {
		dump_cmd => [
			'pg_dump',
			'--format=plain',
			"--file=$tempdir/packages.sql",
			'--no-owner',
			'--schema-only',
			'testdb',
		],
	},

	# Clean dump: emits DROP ... IF EXISTS statements ahead of the DDL.
	clean_if_exists => {
		dump_cmd => [
			'pg_dump',
			'--no-sync',
			"--file=$tempdir/clean_if_exists.sql",
			'-c',
			'--if-exists',
			'--encoding=UTF8',    # no-op, just tests that option is accepted
			'testdb',
		],
	},
);

###############################################################
# Definition of the tests to run.
#
# Each test is defined using the log message that will be used.
#
# A regexp should be defined for each test which provides the
# basis for the test. That regexp will be run against the output
# file of each of the runs which the test is to be run against
# and the success of the result will depend on if the regexp
# result matches the expected 'like' or 'unlike' case.
#
# The runs listed as 'like' will be checked if they match the
# regexp and, if so, the test passes. All runs which are not
# listed as 'like' will be checked to ensure they don't match
# the regexp; if they do, the test will fail.
#
# The below hashes provide convenience sets of runs. Individual
# runs can be excluded from a general hash by placing that run
# into the 'unlike' section.
#
# For example, there is an 'exclude_test_table' run which runs a
# full pg_dump but with an exclude flag to not include the test
# table. The CREATE TABLE test which creates the test table is
# defined with %full_runs but then has 'exclude_test_table' in
# its 'unlike' list, excluding that test.
#
# There can then be a 'create_sql' and 'create_order' for a
# given test. The 'create_sql' commands are collected up in
# 'create_order' and then run against the database prior to any
# of the pg_dump runs happening. This is what "seeds" the
# system with objects to be dumped out.
#
# There can be a flag called 'lz4', which can be set if the test
# case depends on LZ4. Tests marked with this flag are skipped if
# the build used does not support LZ4.
#
# Building of this hash takes a bit of time as all of the regexps
# included in it are compiled. This greatly improves performance
# as the regexps are used for each run the test applies to.


# Tests which are considered 'full' dumps by pg_dump, but there
# are flags used to exclude specific items (ACLs, LOs, etc).
my %full_runs = (
	packages        => 1,
	clean_if_exists => 1,
);

# This is where the actual tests are defined.
#
# NOTE: review-tool residue that had been pasted into the middle of this
# hash (breaking compilation) has been removed, and the '.' in the schema-
# qualified names is now escaped — an unescaped dot is a regex wildcard and
# would also match e.g. "publicXtest_pkg".
my %tests = (
	'CREATE PACKAGE test_pkg' => {
		create_order => 1,
		create_sql =>
		  'CREATE OR REPLACE PACKAGE test_pkg AS
FUNCTION hello() RETURN text;
END test_pkg;',
		regexp =>
		  qr/^CREATE EDITIONABLE PACKAGE public\.test_pkg AUTHID DEFINER AS/m,
		like => { %full_runs, },
	},

	'CREATE PACKAGE BODY test_pkg' => {
		create_order => 2,
		create_sql =>
		  'CREATE OR REPLACE PACKAGE BODY test_pkg AS
FUNCTION hello() RETURN text IS
BEGIN
RETURN \'Hello from test package\';
END;
END test_pkg;',
		regexp => qr/^CREATE PACKAGE BODY public\.test_pkg AS/m,
		like => { %full_runs, },
	},

	# Only the clean run emits DROP statements, so only it should match.
	'DROP PACKAGE IF EXISTS public.test_pkg;' => {
		regexp => qr/^DROP PACKAGE IF EXISTS public\.test_pkg;/m,
		like => { clean_if_exists => 1, },
	},
);

#########################################
# Create a PG instance to test actually dumping from

my $node = PostgreSQL::Test::Cluster->new('main');
$node->init;
$node->start;

# ICU doesn't work with some encodings.
# NOTE(review): $encoding is never consulted below — if an encoding-based
# skip was intended (as in upstream 002_pg_dump.pl), it is missing; confirm.
# The unused "my ($connect_to_oraport);" declaration was removed.
my $encoding = $node->safe_psql('postgres', 'show server_encoding',
	connect_to_oraport => 1);

# Create the database that the dump runs operate on.
$node->psql('postgres', 'create database testdb;', connect_to_oraport => 1);

#########################################
# Set up schemas, tables, etc, to be dumped.

# Build up the create statements
# Accumulate, per target database, the SQL that seeds the objects to dump.
my %create_sql = ();

# Visit tests in create_order; tests without one sort last (order among
# themselves unspecified, matching the hash-key tiebreak of sort).
foreach my $test (
	sort {
		my $ord_a = $tests{$a}->{create_order};
		my $ord_b = $tests{$b}->{create_order};
		  ($ord_a && $ord_b) ? $ord_a <=> $ord_b
		: $ord_a             ? -1
		: $ord_b             ? 1
		:                      0;
	} keys %tests)
{
	# Tests may target a specific database; default is 'testdb'.
	my $test_db =
	  defined($tests{$test}->{database})
	  ? $tests{$test}->{database}
	  : 'testdb';

	if ($tests{$test}->{create_sql})
	{
		# Normalize command ending: strip trailing newline, add a
		# semicolon if missing, then separate with a blank line.
		my $sql = $tests{$test}->{create_sql};
		chomp $sql;
		$sql .= ';' unless $sql =~ /;\z/;
		$create_sql{$test_db} .= "$sql\n\n";
	}
}

# Send each database's combined command set to psql in one shot.
foreach my $db (sort keys %create_sql)
{
	$node->safe_psql($db, $create_sql{$db}, connect_to_oraport => 1);
}

#########################################
# Run all runs

# Execute every dump run, then apply each test's like/unlike regexp to the
# run's output file. (Review-tool residue that had been pasted inside this
# loop — breaking compilation — has been removed.)
foreach my $run (sort keys %pgdump_runs)
{
	my $test_key = $run;

	# Database this run dumps from. It is constant for the run, so it is
	# resolved once here instead of inside the per-test loop below.
	my $run_db = 'testdb';
	if (defined($pgdump_runs{$run}->{database}))
	{
		$run_db = $pgdump_runs{$run}->{database};
	}

	$node->command_ok(\@{ $pgdump_runs{$run}->{dump_cmd} },
		"$run: pg_dump runs");

	if ($pgdump_runs{$run}->{compress_cmd})
	{
		my ($compress_cmd) = $pgdump_runs{$run}->{compress_cmd};
		my $compress_program = $compress_cmd->{program};

		# Skip the rest of the test if the compression program is
		# not defined.
		next if (!defined($compress_program) || $compress_program eq '');

		# Arguments may require globbing.
		my @full_compress_cmd = ($compress_program);
		foreach my $arg (@{ $compress_cmd->{args} })
		{
			push @full_compress_cmd, glob($arg);
		}

		command_ok(\@full_compress_cmd, "$run: compression commands");
	}

	# Verify that any expected output files were actually produced.
	if ($pgdump_runs{$run}->{glob_patterns})
	{
		my $glob_patterns = $pgdump_runs{$run}->{glob_patterns};
		foreach my $glob_pattern (@{$glob_patterns})
		{
			my @glob_output = glob($glob_pattern);
			is(scalar(@glob_output) > 0, 1,
				"$run: glob check for $glob_pattern");
		}
	}

	# Run an auxiliary command over the dump (e.g. pg_restore -l) and
	# match its output.
	if ($pgdump_runs{$run}->{command_like})
	{
		my $cmd_like = $pgdump_runs{$run}->{command_like};
		$node->command_like(\@{ $cmd_like->{command} },
			$cmd_like->{expected},
			"$run: " . $cmd_like->{name});
	}

	if ($pgdump_runs{$run}->{restore_cmd})
	{
		$node->command_ok(\@{ $pgdump_runs{$run}->{restore_cmd} },
			"$run: pg_restore runs");
	}

	# A run may delegate its like/unlike matching to another run's key.
	if ($pgdump_runs{$run}->{test_key})
	{
		$test_key = $pgdump_runs{$run}->{test_key};
	}

	my $output_file = slurp_file("$tempdir/${run}.sql");

	#########################################
	# Run all tests where this run is included
	# as either a 'like' or 'unlike' test.

	foreach my $test (sort keys %tests)
	{
		my $test_db = 'testdb';

		if (defined($tests{$test}->{database}))
		{
			$test_db = $tests{$test}->{database};
		}

		# Check for proper test definitions
		#
		# There should be a "like" list, even if it is empty. (This
		# makes the test more self-documenting.)
		if (!defined($tests{$test}->{like}))
		{
			die "missing \"like\" in test \"$test\"";
		}

		# Check for useless entries in "unlike" list. Runs that are
		# not listed in "like" don't need to be excluded in "unlike".
		if ($tests{$test}->{unlike}->{$test_key}
			&& !defined($tests{$test}->{like}->{$test_key}))
		{
			die "useless \"unlike\" entry \"$test_key\" in test \"$test\"";
		}

		# Only apply tests defined for the database this run dumps.
		if ($run_db ne $test_db)
		{
			next;
		}

		# Run the test listed as a like, unless it is specifically noted
		# as an unlike (generally due to an explicit exclusion or similar).
		if ($tests{$test}->{like}->{$test_key}
			&& !defined($tests{$test}->{unlike}->{$test_key}))
		{
			if (!ok($output_file =~ $tests{$test}->{regexp},
					"$run: should dump $test"))
			{
				diag("Review $run results in $tempdir");
			}
		}
		else
		{
			if (!ok($output_file !~ $tests{$test}->{regexp},
					"$run: should not dump $test"))
			{
				diag("Review $run results in $tempdir");
			}
		}
	}
}

#########################################
# Stop the database instance, which will be removed at the end of the tests.

# 'fast' mode disconnects clients immediately rather than waiting for them.
$node->stop('fast');

done_testing();
Loading