Diffstat (limited to 'src')
-rw-r--r--  src/pl/plpgsql/src/input/plpgsql_copy.source | 46
-rw-r--r--  src/pl/plpgsql/src/output/plpgsql_copy.source | 43
-rw-r--r--  src/test/regress/input/constraints.source | 9
-rw-r--r--  src/test/regress/input/copy.source | 96
-rw-r--r--  src/test/regress/input/create_function_0.source | 41
-rw-r--r--  src/test/regress/input/create_function_1.source | 14
-rw-r--r--  src/test/regress/input/create_function_2.source | 18
-rw-r--r--  src/test/regress/input/largeobject.source | 35
-rw-r--r--  src/test/regress/input/misc.source | 16
-rw-r--r--  src/test/regress/input/tablespace.source | 11
-rw-r--r--  src/test/regress/output/constraints.source | 8
-rw-r--r--  src/test/regress/output/copy.source | 95
-rw-r--r--  src/test/regress/output/create_function_0.source | 46
-rw-r--r--  src/test/regress/output/create_function_1.source | 12
-rw-r--r--  src/test/regress/output/create_function_2.source | 14
-rw-r--r--  src/test/regress/output/largeobject.source | 34
-rw-r--r--  src/test/regress/output/largeobject_1.source | 34
-rw-r--r--  src/test/regress/output/misc.source | 15
-rw-r--r--  src/test/regress/output/tablespace.source | 9
-rw-r--r--  src/test/regress/pg_regress.c | 8
20 files changed, 374 insertions, 230 deletions
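For orientation, the hunks below replace build-time @abs_srcdir@ / @libdir@ / @DLSUFFIX@ substitution in the .source tests with psql variables filled at run time from environment variables exported by pg_regress. A minimal sketch of the new pattern, using names that appear in the diff (it assumes a psql new enough to have \getenv):

    -- pg_regress exports the directory; psql reads it into a variable
    \getenv abs_srcdir PG_ABS_SRCDIR
    -- \set concatenates its arguments into one value
    \set filename :abs_srcdir '/data/agg.data'
    -- :'filename' interpolates the value as a single-quoted SQL literal
    COPY aggtest FROM :'filename';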
diff --git a/src/pl/plpgsql/src/input/plpgsql_copy.source b/src/pl/plpgsql/src/input/plpgsql_copy.source
index b7bcbb7d17f..37f1fa132b2 100644
--- a/src/pl/plpgsql/src/input/plpgsql_copy.source
+++ b/src/pl/plpgsql/src/input/plpgsql_copy.source
@@ -1,3 +1,11 @@
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
+-- set up file names to use
+\set srcfilename :abs_srcdir '/data/copy1.data'
+\set destfilename :abs_builddir '/results/copy1.data'
+
CREATE TABLE copy1 (a int, b float);
-- COPY TO/FROM not authorized from client.
@@ -24,38 +32,26 @@ $$;
-- Valid cases
-- COPY FROM
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_srcdir@/data/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'srcfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_srcdir@/data/copy1.data''';
-END;
-$$;
+\set cmd 'COPY copy1 FROM ' :'srcfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
-- COPY TO
-- Copy the data externally once, then process it back to the table.
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 TO '@abs_builddir@/results/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 TO ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_builddir@/results/copy1.data';
-END;
-$$;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_builddir@/results/copy1.data''';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
+
+\set cmd 'COPY copy1 FROM ' :'destfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
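A note on the DO-block rewrites above: psql does not expand :variables inside dollar-quoted literals, so the bodies are now assembled as ordinary quoted strings and handed to DO as a literal. Roughly, given the variables set at the top of the file, the sketch

    \set dobody 'BEGIN COPY copy1 FROM ' :'srcfilename' '; END'
    DO LANGUAGE plpgsql :'dobody';

sends the server the same code as the old DO LANGUAGE plpgsql $$ BEGIN COPY copy1 FROM '<abs_srcdir>/data/copy1.data'; END; $$ form, with :'srcfilename' supplying the embedded quoted path and :'dobody' quoting the assembled body.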
diff --git a/src/pl/plpgsql/src/output/plpgsql_copy.source b/src/pl/plpgsql/src/output/plpgsql_copy.source
index 86e833d055a..bc834be1971 100644
--- a/src/pl/plpgsql/src/output/plpgsql_copy.source
+++ b/src/pl/plpgsql/src/output/plpgsql_copy.source
@@ -1,3 +1,9 @@
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+-- set up file names to use
+\set srcfilename :abs_srcdir '/data/copy1.data'
+\set destfilename :abs_builddir '/results/copy1.data'
CREATE TABLE copy1 (a int, b float);
-- COPY TO/FROM not authorized from client.
DO LANGUAGE plpgsql $$
@@ -30,11 +36,8 @@ ERROR: cannot COPY to/from client in PL/pgSQL
CONTEXT: PL/pgSQL function inline_code_block line 3 at EXECUTE
-- Valid cases
-- COPY FROM
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_srcdir@/data/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'srcfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
a | b
---+-----
@@ -44,11 +47,9 @@ SELECT * FROM copy1 ORDER BY 1;
(3 rows)
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_srcdir@/data/copy1.data''';
-END;
-$$;
+\set cmd 'COPY copy1 FROM ' :'srcfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
a | b
---+-----
@@ -59,22 +60,14 @@ SELECT * FROM copy1 ORDER BY 1;
-- COPY TO
-- Copy the data externally once, then process it back to the table.
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 TO '@abs_builddir@/results/copy1.data';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 TO ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
TRUNCATE copy1;
-DO LANGUAGE plpgsql $$
-BEGIN
- COPY copy1 FROM '@abs_builddir@/results/copy1.data';
-END;
-$$;
-DO LANGUAGE plpgsql $$
-BEGIN
- EXECUTE 'COPY copy1 FROM ''@abs_builddir@/results/copy1.data''';
-END;
-$$;
+\set dobody 'BEGIN COPY copy1 FROM ' :'destfilename' '; END'
+DO LANGUAGE plpgsql :'dobody';
+\set cmd 'COPY copy1 FROM ' :'destfilename'
+\set dobody 'BEGIN EXECUTE ' :'cmd' '; END'
+DO LANGUAGE plpgsql :'dobody';
SELECT * FROM copy1 ORDER BY 1;
a | b
---+-----
diff --git a/src/test/regress/input/constraints.source b/src/test/regress/input/constraints.source
index 6bb76483218..458f8057785 100644
--- a/src/test/regress/input/constraints.source
+++ b/src/test/regress/input/constraints.source
@@ -8,6 +8,9 @@
-- - EXCLUDE clauses
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+
--
-- DEFAULT syntax
--
@@ -239,11 +242,13 @@ CREATE TABLE COPY_TBL (x INT, y TEXT, z INT,
CONSTRAINT COPY_CON
CHECK (x > 3 AND y <> 'check failed' AND x < 7 ));
-COPY COPY_TBL FROM '@abs_srcdir@/data/constro.data';
+\set filename :abs_srcdir '/data/constro.data'
+COPY COPY_TBL FROM :'filename';
SELECT * FROM COPY_TBL;
-COPY COPY_TBL FROM '@abs_srcdir@/data/constrf.data';
+\set filename :abs_srcdir '/data/constrf.data'
+COPY COPY_TBL FROM :'filename';
SELECT * FROM COPY_TBL;
diff --git a/src/test/regress/input/copy.source b/src/test/regress/input/copy.source
index 8acb5168015..15e26517ecb 100644
--- a/src/test/regress/input/copy.source
+++ b/src/test/regress/input/copy.source
@@ -2,65 +2,90 @@
-- COPY
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
-COPY aggtest FROM '@abs_srcdir@/data/agg.data';
+\set filename :abs_srcdir '/data/agg.data'
+COPY aggtest FROM :'filename';
-COPY onek FROM '@abs_srcdir@/data/onek.data';
+\set filename :abs_srcdir '/data/onek.data'
+COPY onek FROM :'filename';
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
+COPY onek FROM :'filename';
-COPY tenk1 FROM '@abs_srcdir@/data/tenk.data';
+\set filename :abs_srcdir '/data/tenk.data'
+COPY tenk1 FROM :'filename';
-COPY slow_emp4000 FROM '@abs_srcdir@/data/rect.data';
+\set filename :abs_srcdir '/data/rect.data'
+COPY slow_emp4000 FROM :'filename';
-COPY person FROM '@abs_srcdir@/data/person.data';
+\set filename :abs_srcdir '/data/person.data'
+COPY person FROM :'filename';
-COPY emp FROM '@abs_srcdir@/data/emp.data';
+\set filename :abs_srcdir '/data/emp.data'
+COPY emp FROM :'filename';
-COPY student FROM '@abs_srcdir@/data/student.data';
+\set filename :abs_srcdir '/data/student.data'
+COPY student FROM :'filename';
-COPY stud_emp FROM '@abs_srcdir@/data/stud_emp.data';
+\set filename :abs_srcdir '/data/stud_emp.data'
+COPY stud_emp FROM :'filename';
-COPY road FROM '@abs_srcdir@/data/streets.data';
+\set filename :abs_srcdir '/data/streets.data'
+COPY road FROM :'filename';
-COPY real_city FROM '@abs_srcdir@/data/real_city.data';
+\set filename :abs_srcdir '/data/real_city.data'
+COPY real_city FROM :'filename';
-COPY hash_i4_heap FROM '@abs_srcdir@/data/hash.data';
+\set filename :abs_srcdir '/data/hash.data'
+COPY hash_i4_heap FROM :'filename';
-COPY hash_name_heap FROM '@abs_srcdir@/data/hash.data';
+COPY hash_name_heap FROM :'filename';
-COPY hash_txt_heap FROM '@abs_srcdir@/data/hash.data';
+COPY hash_txt_heap FROM :'filename';
-COPY hash_f8_heap FROM '@abs_srcdir@/data/hash.data';
+COPY hash_f8_heap FROM :'filename';
-COPY test_tsvector FROM '@abs_srcdir@/data/tsearch.data';
+\set filename :abs_srcdir '/data/tsearch.data'
+COPY test_tsvector FROM :'filename';
-COPY testjsonb FROM '@abs_srcdir@/data/jsonb.data';
+\set filename :abs_srcdir '/data/jsonb.data'
+COPY testjsonb FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
--- COPY hash_ovfl_heap FROM '@abs_srcdir@/data/hashovfl.data';
+-- \set filename :abs_srcdir '/data/hashovfl.data'
+-- COPY hash_ovfl_heap FROM :'filename';
-COPY bt_i4_heap FROM '@abs_srcdir@/data/desc.data';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_i4_heap FROM :'filename';
-COPY bt_name_heap FROM '@abs_srcdir@/data/hash.data';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_name_heap FROM :'filename';
-COPY bt_txt_heap FROM '@abs_srcdir@/data/desc.data';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_txt_heap FROM :'filename';
-COPY bt_f8_heap FROM '@abs_srcdir@/data/hash.data';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_f8_heap FROM :'filename';
-COPY array_op_test FROM '@abs_srcdir@/data/array.data';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_op_test FROM :'filename';
-COPY array_index_op_test FROM '@abs_srcdir@/data/array.data';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_index_op_test FROM :'filename';
-- analyze all the data we just loaded, to ensure plan consistency
-- in later tests
@@ -100,11 +125,12 @@ insert into copytest values('Unix',E'abc\ndef',2);
insert into copytest values('Mac',E'abc\rdef',3);
insert into copytest values(E'esc\\ape',E'a\\r\\\r\\\n\\nb',4);
-copy copytest to '@abs_builddir@/results/copytest.csv' csv;
+\set filename :abs_builddir '/results/copytest.csv'
+copy copytest to :'filename' csv;
create temp table copytest2 (like copytest);
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv;
+copy copytest2 from :'filename' csv;
select * from copytest except select * from copytest2;
@@ -112,9 +138,9 @@ truncate copytest2;
--- same test but with an escape char different from quote char
-copy copytest to '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
+copy copytest to :'filename' csv quote '''' escape E'\\';
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
+copy copytest2 from :'filename' csv quote '''' escape E'\\';
select * from copytest except select * from copytest2;
@@ -153,16 +179,17 @@ insert into parted_copytest select x,1,'One' from generate_series(1,1000) x;
insert into parted_copytest select x,2,'Two' from generate_series(1001,1010) x;
insert into parted_copytest select x,1,'One' from generate_series(1011,1020) x;
-copy (select * from parted_copytest order by a) to '@abs_builddir@/results/parted_copytest.csv';
+\set filename :abs_builddir '/results/parted_copytest.csv'
+copy (select * from parted_copytest order by a) to :'filename';
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
-- Ensure COPY FREEZE errors for partitioned tables.
begin;
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv' (freeze);
+copy parted_copytest from :'filename' (freeze);
rollback;
select tableoid::regclass,count(*),sum(a) from parted_copytest
@@ -182,7 +209,7 @@ create trigger part_ins_trig
for each row
execute procedure part_ins_func();
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
select tableoid::regclass,count(*),sum(a) from parted_copytest
group by tableoid order by tableoid::regclass::name;
@@ -257,7 +284,8 @@ bill 20 (11,10) 1000 sharon
-- Generate COPY FROM report with FILE, with some excluded tuples.
truncate tab_progress_reporting;
-copy tab_progress_reporting from '@abs_srcdir@/data/emp.data'
+\set filename :abs_srcdir '/data/emp.data'
+copy tab_progress_reporting from :'filename'
where (salary < 2000);
drop trigger check_after_tab_progress_reporting on tab_progress_reporting;
diff --git a/src/test/regress/input/create_function_0.source b/src/test/regress/input/create_function_0.source
index f47f635789a..c5224742f94 100644
--- a/src/test/regress/input/create_function_0.source
+++ b/src/test/regress/input/create_function_0.source
@@ -2,70 +2,78 @@
-- CREATE_FUNCTION_0
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set autoinclib :libdir '/autoinc' :dlsuffix
+\set refintlib :libdir '/refint' :dlsuffix
+\set regresslib :libdir '/regress' :dlsuffix
+
-- Create a bunch of C functions that will be used by later tests:
CREATE FUNCTION check_primary_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION autoinc ()
RETURNS trigger
- AS '@libdir@/autoinc@DLSUFFIX@'
+ AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION ttdummy ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
- AS '@libdir@/regress@DLSUFFIX@', 'test_fdw_handler'
+ AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
CREATE FUNCTION test_support_func(internal)
RETURNS internal
- AS '@libdir@/regress@DLSUFFIX@', 'test_support_func'
+ AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@', 'test_opclass_options_func'
+ AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
- AS '@libdir@/regress@DLSUFFIX@', 'test_enc_conversion'
+ AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@', 'binary_coercible'
+ AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
-- Things that shouldn't work:
@@ -88,8 +96,13 @@ CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
AS 'nosuchfile';
+-- To produce stable regression test output, we have to filter the name
+-- of the regresslib file out of the error message in this test.
+\set VERBOSITY sqlstate
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
- AS '@libdir@/regress@DLSUFFIX@', 'nosuchsymbol';
+ AS :'regresslib', 'nosuchsymbol';
+\set VERBOSITY default
+SELECT regexp_replace(:'LAST_ERROR_MESSAGE', 'file ".*"', 'file "..."');
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE internal
AS 'nosuch';
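The nosuchsymbol test above needs special handling because the error message normally embeds the full path of the regress library, which differs across installations: verbosity is dropped to sqlstate while the failing CREATE FUNCTION runs, and the message text is then recovered from psql's LAST_ERROR_MESSAGE variable with the path scrubbed out. The same idea in isolation (the division-by-zero error is only an illustration):

    \set VERBOSITY sqlstate
    SELECT 1/0;
    \set VERBOSITY default
    -- LAST_ERROR_MESSAGE holds the text of the most recent error
    SELECT regexp_replace(:'LAST_ERROR_MESSAGE', 'zero', '...');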
diff --git a/src/test/regress/input/create_function_1.source b/src/test/regress/input/create_function_1.source
index 79a41562bb0..4170b16fe6b 100644
--- a/src/test/regress/input/create_function_1.source
+++ b/src/test/regress/input/create_function_1.source
@@ -2,24 +2,30 @@
-- CREATE_FUNCTION_1
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set regresslib :libdir '/regress' :dlsuffix
+
-- Create C functions needed by create_type.sql
CREATE FUNCTION widget_in(cstring)
RETURNS widget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION widget_out(widget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
diff --git a/src/test/regress/input/create_function_2.source b/src/test/regress/input/create_function_2.source
index 9e6d2942ec7..67510aed239 100644
--- a/src/test/regress/input/create_function_2.source
+++ b/src/test/regress/input/create_function_2.source
@@ -1,6 +1,14 @@
--
-- CREATE_FUNCTION_2
--
+
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+
+\set regresslib :libdir '/regress' :dlsuffix
+
+
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
@@ -64,25 +72,25 @@ CREATE FUNCTION equipment_named_ambiguous_2b(hobby text)
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION overpaid(emp)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
--
-- Function dynamic loading
--
-LOAD '@libdir@/regress@DLSUFFIX@';
+LOAD :'regresslib';
diff --git a/src/test/regress/input/largeobject.source b/src/test/regress/input/largeobject.source
index b1e7ae99096..16da077f3a2 100644
--- a/src/test/regress/input/largeobject.source
+++ b/src/test/regress/input/largeobject.source
@@ -2,6 +2,10 @@
-- Test large object support
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
-- ensure consistent test output regardless of the default bytea format
SET bytea_output TO escape;
@@ -124,16 +128,13 @@ BEGIN;
SELECT lo_open(loid, x'40000'::int) from lotest_stash_values;
ABORT;
-DO $$
-DECLARE
- loid oid;
-BEGIN
- SELECT tbl.loid INTO loid FROM lotest_stash_values tbl;
- PERFORM lo_export(loid, '@abs_builddir@/results/invalid/path');
-EXCEPTION
- WHEN UNDEFINED_FILE THEN RAISE NOTICE 'could not open file, as expected';
-END;
-$$;
+\set filename :abs_builddir '/results/invalid/path'
+\set dobody 'DECLARE loid oid; BEGIN '
+\set dobody :dobody 'SELECT tbl.loid INTO loid FROM lotest_stash_values tbl; '
+\set dobody :dobody 'PERFORM lo_export(loid, ' :'filename' '); '
+\set dobody :dobody 'EXCEPTION WHEN UNDEFINED_FILE THEN '
+\set dobody :dobody 'RAISE NOTICE ''could not open file, as expected''; END'
+DO :'dobody';
-- Test truncation.
BEGIN;
@@ -183,7 +184,8 @@ SELECT lo_unlink(loid) from lotest_stash_values;
TRUNCATE lotest_stash_values;
-INSERT INTO lotest_stash_values (loid) SELECT lo_import('@abs_srcdir@/data/tenk.data');
+\set filename :abs_srcdir '/data/tenk.data'
+INSERT INTO lotest_stash_values (loid) SELECT lo_import(:'filename');
BEGIN;
UPDATE lotest_stash_values SET fd=lo_open(loid, CAST(x'20000' | x'40000' AS integer));
@@ -212,14 +214,16 @@ SELECT loread(fd, 36) FROM lotest_stash_values;
SELECT lo_close(fd) FROM lotest_stash_values;
END;
-SELECT lo_export(loid, '@abs_builddir@/results/lotest.txt') FROM lotest_stash_values;
+\set filename :abs_builddir '/results/lotest.txt'
+SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
-\lo_import '@abs_builddir@/results/lotest.txt'
+\lo_import :filename
\set newloid :LASTOID
-- just make sure \lo_export does not barf
-\lo_export :newloid '@abs_builddir@/results/lotest2.txt'
+\set filename :abs_builddir '/results/lotest2.txt'
+\lo_export :newloid :filename
-- This is a hack to test that export/import are reversible
-- This uses knowledge about the inner workings of large object mechanism
@@ -234,7 +238,8 @@ TRUNCATE lotest_stash_values;
\lo_unlink :newloid
-\lo_import '@abs_builddir@/results/lotest.txt'
+\set filename :abs_builddir '/results/lotest.txt'
+\lo_import :filename
\set newloid_1 :LASTOID
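One detail in the largeobject hunks: SQL statements interpolate the path as :'filename', which produces a quoted literal, while psql meta-commands such as \lo_import and \lo_export take the bare :filename, since their arguments are not parsed as SQL. For example (names as in the diff):

    \set filename :abs_builddir '/results/lotest.txt'
    -- SQL needs a quoted literal:
    SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
    -- a meta-command wants the raw value:
    \lo_import :filename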
diff --git a/src/test/regress/input/misc.source b/src/test/regress/input/misc.source
index b1dbc573c9b..a1e2f779ba7 100644
--- a/src/test/regress/input/misc.source
+++ b/src/test/regress/input/misc.source
@@ -2,6 +2,10 @@
-- MISC
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
+
--
-- BTREE
--
@@ -51,25 +55,27 @@ DROP TABLE tmp;
--
-- copy
--
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
+COPY onek FROM :'filename';
SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
DELETE FROM onek2;
-COPY onek2 FROM '@abs_builddir@/results/onek.data';
+COPY onek2 FROM :'filename';
SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
-COPY BINARY stud_emp TO '@abs_builddir@/results/stud_emp.data';
+\set filename :abs_builddir '/results/stud_emp.data'
+COPY BINARY stud_emp TO :'filename';
DELETE FROM stud_emp;
-COPY BINARY stud_emp FROM '@abs_builddir@/results/stud_emp.data';
+COPY BINARY stud_emp FROM :'filename';
SELECT * FROM stud_emp;
diff --git a/src/test/regress/input/tablespace.source b/src/test/regress/input/tablespace.source
index c133e73499f..92076db9a13 100644
--- a/src/test/regress/input/tablespace.source
+++ b/src/test/regress/input/tablespace.source
@@ -1,6 +1,11 @@
+-- directory paths are passed to us in environment variables
+\getenv abs_builddir PG_ABS_BUILDDIR
+
+\set testtablespace :abs_builddir '/testtablespace'
+
-- create a tablespace using WITH clause
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (some_nonexistent_parameter = true); -- fail
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (random_page_cost = 3.0); -- ok
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (some_nonexistent_parameter = true); -- fail
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (random_page_cost = 3.0); -- ok
-- check to see the parameter was used
SELECT spcoptions FROM pg_tablespace WHERE spcname = 'regress_tblspacewith';
@@ -9,7 +14,7 @@ SELECT spcoptions FROM pg_tablespace WHERE spcname = 'regress_tblspacewith';
DROP TABLESPACE regress_tblspacewith;
-- create a tablespace we can use
-CREATE TABLESPACE regress_tblspace LOCATION '@testtablespace@';
+CREATE TABLESPACE regress_tblspace LOCATION :'testtablespace';
-- try setting and resetting some properties for the new tablespace
ALTER TABLESPACE regress_tblspace SET (random_page_cost = 1.0, seq_page_cost = 1.1);
diff --git a/src/test/regress/output/constraints.source b/src/test/regress/output/constraints.source
index eff793cc3d3..e32cf8bb574 100644
--- a/src/test/regress/output/constraints.source
+++ b/src/test/regress/output/constraints.source
@@ -7,6 +7,8 @@
-- - UNIQUE clauses
-- - EXCLUDE clauses
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
--
-- DEFAULT syntax
--
@@ -346,7 +348,8 @@ SELECT * FROM INSERT_TBL;
CREATE TABLE COPY_TBL (x INT, y TEXT, z INT,
CONSTRAINT COPY_CON
CHECK (x > 3 AND y <> 'check failed' AND x < 7 ));
-COPY COPY_TBL FROM '@abs_srcdir@/data/constro.data';
+\set filename :abs_srcdir '/data/constro.data'
+COPY COPY_TBL FROM :'filename';
SELECT * FROM COPY_TBL;
x | y | z
---+---------------+---
@@ -354,7 +357,8 @@ SELECT * FROM COPY_TBL;
6 | OK | 4
(2 rows)
-COPY COPY_TBL FROM '@abs_srcdir@/data/constrf.data';
+\set filename :abs_srcdir '/data/constrf.data'
+COPY COPY_TBL FROM :'filename';
ERROR: new row for relation "copy_tbl" violates check constraint "copy_con"
DETAIL: Failing row contains (7, check failed, 6).
CONTEXT: COPY copy_tbl, line 2: "7 check failed 6"
diff --git a/src/test/regress/output/copy.source b/src/test/regress/output/copy.source
index 25bdec6c60e..931e7b2e699 100644
--- a/src/test/regress/output/copy.source
+++ b/src/test/regress/output/copy.source
@@ -1,40 +1,64 @@
--
-- COPY
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
-- CLASS POPULATION
-- (any resemblance to real life is purely coincidental)
--
-COPY aggtest FROM '@abs_srcdir@/data/agg.data';
-COPY onek FROM '@abs_srcdir@/data/onek.data';
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_srcdir '/data/agg.data'
+COPY aggtest FROM :'filename';
+\set filename :abs_srcdir '/data/onek.data'
+COPY onek FROM :'filename';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
-COPY tenk1 FROM '@abs_srcdir@/data/tenk.data';
-COPY slow_emp4000 FROM '@abs_srcdir@/data/rect.data';
-COPY person FROM '@abs_srcdir@/data/person.data';
-COPY emp FROM '@abs_srcdir@/data/emp.data';
-COPY student FROM '@abs_srcdir@/data/student.data';
-COPY stud_emp FROM '@abs_srcdir@/data/stud_emp.data';
-COPY road FROM '@abs_srcdir@/data/streets.data';
-COPY real_city FROM '@abs_srcdir@/data/real_city.data';
-COPY hash_i4_heap FROM '@abs_srcdir@/data/hash.data';
-COPY hash_name_heap FROM '@abs_srcdir@/data/hash.data';
-COPY hash_txt_heap FROM '@abs_srcdir@/data/hash.data';
-COPY hash_f8_heap FROM '@abs_srcdir@/data/hash.data';
-COPY test_tsvector FROM '@abs_srcdir@/data/tsearch.data';
-COPY testjsonb FROM '@abs_srcdir@/data/jsonb.data';
+COPY onek FROM :'filename';
+\set filename :abs_srcdir '/data/tenk.data'
+COPY tenk1 FROM :'filename';
+\set filename :abs_srcdir '/data/rect.data'
+COPY slow_emp4000 FROM :'filename';
+\set filename :abs_srcdir '/data/person.data'
+COPY person FROM :'filename';
+\set filename :abs_srcdir '/data/emp.data'
+COPY emp FROM :'filename';
+\set filename :abs_srcdir '/data/student.data'
+COPY student FROM :'filename';
+\set filename :abs_srcdir '/data/stud_emp.data'
+COPY stud_emp FROM :'filename';
+\set filename :abs_srcdir '/data/streets.data'
+COPY road FROM :'filename';
+\set filename :abs_srcdir '/data/real_city.data'
+COPY real_city FROM :'filename';
+\set filename :abs_srcdir '/data/hash.data'
+COPY hash_i4_heap FROM :'filename';
+COPY hash_name_heap FROM :'filename';
+COPY hash_txt_heap FROM :'filename';
+COPY hash_f8_heap FROM :'filename';
+\set filename :abs_srcdir '/data/tsearch.data'
+COPY test_tsvector FROM :'filename';
+\set filename :abs_srcdir '/data/jsonb.data'
+COPY testjsonb FROM :'filename';
-- the data in this file has a lot of duplicates in the index key
-- fields, leading to long bucket chains and lots of table expansion.
-- this is therefore a stress test of the bucket overflow code (unlike
-- the data in hash.data, which has unique index keys).
--
--- COPY hash_ovfl_heap FROM '@abs_srcdir@/data/hashovfl.data';
-COPY bt_i4_heap FROM '@abs_srcdir@/data/desc.data';
-COPY bt_name_heap FROM '@abs_srcdir@/data/hash.data';
-COPY bt_txt_heap FROM '@abs_srcdir@/data/desc.data';
-COPY bt_f8_heap FROM '@abs_srcdir@/data/hash.data';
-COPY array_op_test FROM '@abs_srcdir@/data/array.data';
-COPY array_index_op_test FROM '@abs_srcdir@/data/array.data';
+-- \set filename :abs_srcdir '/data/hashovfl.data'
+-- COPY hash_ovfl_heap FROM :'filename';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_i4_heap FROM :'filename';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_name_heap FROM :'filename';
+\set filename :abs_srcdir '/data/desc.data'
+COPY bt_txt_heap FROM :'filename';
+\set filename :abs_srcdir '/data/hash.data'
+COPY bt_f8_heap FROM :'filename';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_op_test FROM :'filename';
+\set filename :abs_srcdir '/data/array.data'
+COPY array_index_op_test FROM :'filename';
-- analyze all the data we just loaded, to ensure plan consistency
-- in later tests
ANALYZE aggtest;
@@ -68,9 +92,10 @@ insert into copytest values('DOS',E'abc\r\ndef',1);
insert into copytest values('Unix',E'abc\ndef',2);
insert into copytest values('Mac',E'abc\rdef',3);
insert into copytest values(E'esc\\ape',E'a\\r\\\r\\\n\\nb',4);
-copy copytest to '@abs_builddir@/results/copytest.csv' csv;
+\set filename :abs_builddir '/results/copytest.csv'
+copy copytest to :'filename' csv;
create temp table copytest2 (like copytest);
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv;
+copy copytest2 from :'filename' csv;
select * from copytest except select * from copytest2;
style | test | filler
-------+------+--------
@@ -78,8 +103,8 @@ select * from copytest except select * from copytest2;
truncate copytest2;
--- same test but with an escape char different from quote char
-copy copytest to '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
-copy copytest2 from '@abs_builddir@/results/copytest.csv' csv quote '''' escape E'\\';
+copy copytest to :'filename' csv quote '''' escape E'\\';
+copy copytest2 from :'filename' csv quote '''' escape E'\\';
select * from copytest except select * from copytest2;
style | test | filler
-------+------+--------
@@ -110,13 +135,14 @@ alter table parted_copytest attach partition parted_copytest_a2 for values in(2)
insert into parted_copytest select x,1,'One' from generate_series(1,1000) x;
insert into parted_copytest select x,2,'Two' from generate_series(1001,1010) x;
insert into parted_copytest select x,1,'One' from generate_series(1011,1020) x;
-copy (select * from parted_copytest order by a) to '@abs_builddir@/results/parted_copytest.csv';
+\set filename :abs_builddir '/results/parted_copytest.csv'
+copy (select * from parted_copytest order by a) to :'filename';
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
-- Ensure COPY FREEZE errors for partitioned tables.
begin;
truncate parted_copytest;
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv' (freeze);
+copy parted_copytest from :'filename' (freeze);
ERROR: cannot perform COPY FREEZE on a partitioned table
rollback;
select tableoid::regclass,count(*),sum(a) from parted_copytest
@@ -138,7 +164,7 @@ create trigger part_ins_trig
before insert on parted_copytest_a2
for each row
execute procedure part_ins_func();
-copy parted_copytest from '@abs_builddir@/results/parted_copytest.csv';
+copy parted_copytest from :'filename';
select tableoid::regclass,count(*),sum(a) from parted_copytest
group by tableoid order by tableoid::regclass::name;
tableoid | count | sum
@@ -213,7 +239,8 @@ copy tab_progress_reporting from stdin;
INFO: progress: {"type": "PIPE", "command": "COPY FROM", "relname": "tab_progress_reporting", "has_bytes_total": false, "tuples_excluded": 0, "tuples_processed": 3, "has_bytes_processed": true}
-- Generate COPY FROM report with FILE, with some excluded tuples.
truncate tab_progress_reporting;
-copy tab_progress_reporting from '@abs_srcdir@/data/emp.data'
+\set filename :abs_srcdir '/data/emp.data'
+copy tab_progress_reporting from :'filename'
where (salary < 2000);
INFO: progress: {"type": "FILE", "command": "COPY FROM", "relname": "tab_progress_reporting", "has_bytes_total": true, "tuples_excluded": 1, "tuples_processed": 2, "has_bytes_processed": true}
drop trigger check_after_tab_progress_reporting on tab_progress_reporting;
diff --git a/src/test/regress/output/create_function_0.source b/src/test/regress/output/create_function_0.source
index 342bc40e115..6e96d6c5d66 100644
--- a/src/test/regress/output/create_function_0.source
+++ b/src/test/regress/output/create_function_0.source
@@ -1,57 +1,63 @@
--
-- CREATE_FUNCTION_0
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set autoinclib :libdir '/autoinc' :dlsuffix
+\set refintlib :libdir '/refint' :dlsuffix
+\set regresslib :libdir '/regress' :dlsuffix
-- Create a bunch of C functions that will be used by later tests:
CREATE FUNCTION check_primary_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION check_foreign_key ()
RETURNS trigger
- AS '@libdir@/refint@DLSUFFIX@'
+ AS :'refintlib'
LANGUAGE C;
CREATE FUNCTION autoinc ()
RETURNS trigger
- AS '@libdir@/autoinc@DLSUFFIX@'
+ AS :'autoinclib'
LANGUAGE C;
CREATE FUNCTION trigger_return_old ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION ttdummy ()
RETURNS trigger
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION set_ttdummy (int4)
RETURNS int4
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION make_tuple_indirect (record)
RETURNS record
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION test_atomic_ops()
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C;
CREATE FUNCTION test_fdw_handler()
RETURNS fdw_handler
- AS '@libdir@/regress@DLSUFFIX@', 'test_fdw_handler'
+ AS :'regresslib', 'test_fdw_handler'
LANGUAGE C;
CREATE FUNCTION test_support_func(internal)
RETURNS internal
- AS '@libdir@/regress@DLSUFFIX@', 'test_support_func'
+ AS :'regresslib', 'test_support_func'
LANGUAGE C STRICT;
CREATE FUNCTION test_opclass_options_func(internal)
RETURNS void
- AS '@libdir@/regress@DLSUFFIX@', 'test_opclass_options_func'
+ AS :'regresslib', 'test_opclass_options_func'
LANGUAGE C;
CREATE FUNCTION test_enc_conversion(bytea, name, name, bool, validlen OUT int, result OUT bytea)
- AS '@libdir@/regress@DLSUFFIX@', 'test_enc_conversion'
+ AS :'regresslib', 'test_enc_conversion'
LANGUAGE C STRICT;
CREATE FUNCTION binary_coercible(oid, oid)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@', 'binary_coercible'
+ AS :'regresslib', 'binary_coercible'
LANGUAGE C STRICT STABLE PARALLEL SAFE;
-- Things that shouldn't work:
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE SQL
@@ -80,9 +86,19 @@ ERROR: only one AS item needed for language "sql"
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
AS 'nosuchfile';
ERROR: could not access file "nosuchfile": No such file or directory
+-- To produce stable regression test output, we have to filter the name
+-- of the regresslib file out of the error message in this test.
+\set VERBOSITY sqlstate
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE C
- AS '@libdir@/regress@DLSUFFIX@', 'nosuchsymbol';
-ERROR: could not find function "nosuchsymbol" in file "@libdir@/regress@DLSUFFIX@"
+ AS :'regresslib', 'nosuchsymbol';
+ERROR: 42883
+\set VERBOSITY default
+SELECT regexp_replace(:'LAST_ERROR_MESSAGE', 'file ".*"', 'file "..."');
+ regexp_replace
+------------------------------------------------------
+ could not find function "nosuchsymbol" in file "..."
+(1 row)
+
CREATE FUNCTION test1 (int) RETURNS int LANGUAGE internal
AS 'nosuch';
ERROR: there is no built-in function named "nosuch"
diff --git a/src/test/regress/output/create_function_1.source b/src/test/regress/output/create_function_1.source
index 616b610e862..5345ed08400 100644
--- a/src/test/regress/output/create_function_1.source
+++ b/src/test/regress/output/create_function_1.source
@@ -1,26 +1,30 @@
--
-- CREATE_FUNCTION_1
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set regresslib :libdir '/regress' :dlsuffix
-- Create C functions needed by create_type.sql
CREATE FUNCTION widget_in(cstring)
RETURNS widget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "widget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION widget_out(widget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type widget is only a shell
CREATE FUNCTION int44in(cstring)
RETURNS city_budget
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: type "city_budget" is not yet defined
DETAIL: Creating a shell type definition.
CREATE FUNCTION int44out(city_budget)
RETURNS cstring
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT IMMUTABLE;
NOTICE: argument type city_budget is only a shell
diff --git a/src/test/regress/output/create_function_2.source b/src/test/regress/output/create_function_2.source
index ac9a7f5cf8d..a366294add7 100644
--- a/src/test/regress/output/create_function_2.source
+++ b/src/test/regress/output/create_function_2.source
@@ -1,6 +1,10 @@
--
-- CREATE_FUNCTION_2
--
+-- directory path and dlsuffix are passed to us in environment variables
+\getenv libdir PG_LIBDIR
+\getenv dlsuffix PG_DLSUFFIX
+\set regresslib :libdir '/regress' :dlsuffix
CREATE FUNCTION hobbies(person)
RETURNS setof hobbies_r
AS 'select * from hobbies_r where person = $1.name'
@@ -49,21 +53,21 @@ CREATE FUNCTION equipment_named_ambiguous_2b(hobby text)
LANGUAGE SQL;
CREATE FUNCTION pt_in_widget(point, widget)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION overpaid(emp)
RETURNS bool
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION interpt_pp(path, path)
RETURNS point
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
CREATE FUNCTION reverse_name(name)
RETURNS name
- AS '@libdir@/regress@DLSUFFIX@'
+ AS :'regresslib'
LANGUAGE C STRICT;
--
-- Function dynamic loading
--
-LOAD '@libdir@/regress@DLSUFFIX@';
+LOAD :'regresslib';
diff --git a/src/test/regress/output/largeobject.source b/src/test/regress/output/largeobject.source
index 91d33b4d0c7..f461ca3b9f4 100644
--- a/src/test/regress/output/largeobject.source
+++ b/src/test/regress/output/largeobject.source
@@ -1,6 +1,9 @@
--
-- Test large object support
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
-- ensure consistent test output regardless of the default bytea format
SET bytea_output TO escape;
-- Load a file
@@ -161,16 +164,13 @@ SELECT lo_open(loid, x'40000'::int) from lotest_stash_values;
(1 row)
ABORT;
-DO $$
-DECLARE
- loid oid;
-BEGIN
- SELECT tbl.loid INTO loid FROM lotest_stash_values tbl;
- PERFORM lo_export(loid, '@abs_builddir@/results/invalid/path');
-EXCEPTION
- WHEN UNDEFINED_FILE THEN RAISE NOTICE 'could not open file, as expected';
-END;
-$$;
+\set filename :abs_builddir '/results/invalid/path'
+\set dobody 'DECLARE loid oid; BEGIN '
+\set dobody :dobody 'SELECT tbl.loid INTO loid FROM lotest_stash_values tbl; '
+\set dobody :dobody 'PERFORM lo_export(loid, ' :'filename' '); '
+\set dobody :dobody 'EXCEPTION WHEN UNDEFINED_FILE THEN '
+\set dobody :dobody 'RAISE NOTICE ''could not open file, as expected''; END'
+DO :'dobody';
NOTICE: could not open file, as expected
-- Test truncation.
BEGIN;
@@ -327,7 +327,8 @@ SELECT lo_unlink(loid) from lotest_stash_values;
(1 row)
TRUNCATE lotest_stash_values;
-INSERT INTO lotest_stash_values (loid) SELECT lo_import('@abs_srcdir@/data/tenk.data');
+\set filename :abs_srcdir '/data/tenk.data'
+INSERT INTO lotest_stash_values (loid) SELECT lo_import(:'filename');
BEGIN;
UPDATE lotest_stash_values SET fd=lo_open(loid, CAST(x'20000' | x'40000' AS integer));
-- verify length of large object
@@ -390,16 +391,18 @@ SELECT lo_close(fd) FROM lotest_stash_values;
(1 row)
END;
-SELECT lo_export(loid, '@abs_builddir@/results/lotest.txt') FROM lotest_stash_values;
+\set filename :abs_builddir '/results/lotest.txt'
+SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
lo_export
-----------
1
(1 row)
-\lo_import '@abs_builddir@/results/lotest.txt'
+\lo_import :filename
\set newloid :LASTOID
-- just make sure \lo_export does not barf
-\lo_export :newloid '@abs_builddir@/results/lotest2.txt'
+\set filename :abs_builddir '/results/lotest2.txt'
+\lo_export :newloid :filename
-- This is a hack to test that export/import are reversible
-- This uses knowledge about the inner workings of large object mechanism
-- which should not be used outside it. This makes it a HACK
@@ -418,7 +421,8 @@ SELECT lo_unlink(loid) FROM lotest_stash_values;
TRUNCATE lotest_stash_values;
\lo_unlink :newloid
-\lo_import '@abs_builddir@/results/lotest.txt'
+\set filename :abs_builddir '/results/lotest.txt'
+\lo_import :filename
\set newloid_1 :LASTOID
SELECT lo_from_bytea(0, lo_get(:newloid_1)) AS newloid_2
\gset
diff --git a/src/test/regress/output/largeobject_1.source b/src/test/regress/output/largeobject_1.source
index cb910e2eefd..a9725c375d5 100644
--- a/src/test/regress/output/largeobject_1.source
+++ b/src/test/regress/output/largeobject_1.source
@@ -1,6 +1,9 @@
--
-- Test large object support
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
-- ensure consistent test output regardless of the default bytea format
SET bytea_output TO escape;
-- Load a file
@@ -161,16 +164,13 @@ SELECT lo_open(loid, x'40000'::int) from lotest_stash_values;
(1 row)
ABORT;
-DO $$
-DECLARE
- loid oid;
-BEGIN
- SELECT tbl.loid INTO loid FROM lotest_stash_values tbl;
- PERFORM lo_export(loid, '@abs_builddir@/results/invalid/path');
-EXCEPTION
- WHEN UNDEFINED_FILE THEN RAISE NOTICE 'could not open file, as expected';
-END;
-$$;
+\set filename :abs_builddir '/results/invalid/path'
+\set dobody 'DECLARE loid oid; BEGIN '
+\set dobody :dobody 'SELECT tbl.loid INTO loid FROM lotest_stash_values tbl; '
+\set dobody :dobody 'PERFORM lo_export(loid, ' :'filename' '); '
+\set dobody :dobody 'EXCEPTION WHEN UNDEFINED_FILE THEN '
+\set dobody :dobody 'RAISE NOTICE ''could not open file, as expected''; END'
+DO :'dobody';
NOTICE: could not open file, as expected
-- Test truncation.
BEGIN;
@@ -327,7 +327,8 @@ SELECT lo_unlink(loid) from lotest_stash_values;
(1 row)
TRUNCATE lotest_stash_values;
-INSERT INTO lotest_stash_values (loid) SELECT lo_import('@abs_srcdir@/data/tenk.data');
+\set filename :abs_srcdir '/data/tenk.data'
+INSERT INTO lotest_stash_values (loid) SELECT lo_import(:'filename');
BEGIN;
UPDATE lotest_stash_values SET fd=lo_open(loid, CAST(x'20000' | x'40000' AS integer));
-- verify length of large object
@@ -390,16 +391,18 @@ SELECT lo_close(fd) FROM lotest_stash_values;
(1 row)
END;
-SELECT lo_export(loid, '@abs_builddir@/results/lotest.txt') FROM lotest_stash_values;
+\set filename :abs_builddir '/results/lotest.txt'
+SELECT lo_export(loid, :'filename') FROM lotest_stash_values;
lo_export
-----------
1
(1 row)
-\lo_import '@abs_builddir@/results/lotest.txt'
+\lo_import :filename
\set newloid :LASTOID
-- just make sure \lo_export does not barf
-\lo_export :newloid '@abs_builddir@/results/lotest2.txt'
+\set filename :abs_builddir '/results/lotest2.txt'
+\lo_export :newloid :filename
-- This is a hack to test that export/import are reversible
-- This uses knowledge about the inner workings of large object mechanism
-- which should not be used outside it. This makes it a HACK
@@ -418,7 +421,8 @@ SELECT lo_unlink(loid) FROM lotest_stash_values;
TRUNCATE lotest_stash_values;
\lo_unlink :newloid
-\lo_import '@abs_builddir@/results/lotest.txt'
+\set filename :abs_builddir '/results/lotest.txt'
+\lo_import :filename
\set newloid_1 :LASTOID
SELECT lo_from_bytea(0, lo_get(:newloid_1)) AS newloid_2
\gset
diff --git a/src/test/regress/output/misc.source b/src/test/regress/output/misc.source
index b9595cc2391..dd65e6ed62f 100644
--- a/src/test/regress/output/misc.source
+++ b/src/test/regress/output/misc.source
@@ -1,6 +1,9 @@
--
-- MISC
--
+-- directory paths are passed to us in environment variables
+\getenv abs_srcdir PG_ABS_SRCDIR
+\getenv abs_builddir PG_ABS_BUILDDIR
--
-- BTREE
--
@@ -41,9 +44,10 @@ DROP TABLE tmp;
--
-- copy
--
-COPY onek TO '@abs_builddir@/results/onek.data';
+\set filename :abs_builddir '/results/onek.data'
+COPY onek TO :'filename';
DELETE FROM onek;
-COPY onek FROM '@abs_builddir@/results/onek.data';
+COPY onek FROM :'filename';
SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
unique1
---------
@@ -52,7 +56,7 @@ SELECT unique1 FROM onek WHERE unique1 < 2 ORDER BY unique1;
(2 rows)
DELETE FROM onek2;
-COPY onek2 FROM '@abs_builddir@/results/onek.data';
+COPY onek2 FROM :'filename';
SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
unique1
---------
@@ -60,9 +64,10 @@ SELECT unique1 FROM onek2 WHERE unique1 < 2 ORDER BY unique1;
1
(2 rows)
-COPY BINARY stud_emp TO '@abs_builddir@/results/stud_emp.data';
+\set filename :abs_builddir '/results/stud_emp.data'
+COPY BINARY stud_emp TO :'filename';
DELETE FROM stud_emp;
-COPY BINARY stud_emp FROM '@abs_builddir@/results/stud_emp.data';
+COPY BINARY stud_emp FROM :'filename';
SELECT * FROM stud_emp;
name | age | location | salary | manager | gpa | percent
-------+-----+------------+--------+---------+-----+---------
diff --git a/src/test/regress/output/tablespace.source b/src/test/regress/output/tablespace.source
index 1bbe7e03236..864f4b6e208 100644
--- a/src/test/regress/output/tablespace.source
+++ b/src/test/regress/output/tablespace.source
@@ -1,7 +1,10 @@
+-- directory paths are passed to us in environment variables
+\getenv abs_builddir PG_ABS_BUILDDIR
+\set testtablespace :abs_builddir '/testtablespace'
-- create a tablespace using WITH clause
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (some_nonexistent_parameter = true); -- fail
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (some_nonexistent_parameter = true); -- fail
ERROR: unrecognized parameter "some_nonexistent_parameter"
-CREATE TABLESPACE regress_tblspacewith LOCATION '@testtablespace@' WITH (random_page_cost = 3.0); -- ok
+CREATE TABLESPACE regress_tblspacewith LOCATION :'testtablespace' WITH (random_page_cost = 3.0); -- ok
-- check to see the parameter was used
SELECT spcoptions FROM pg_tablespace WHERE spcname = 'regress_tblspacewith';
spcoptions
@@ -12,7 +15,7 @@ SELECT spcoptions FROM pg_tablespace WHERE spcname = 'regress_tblspacewith';
-- drop the tablespace so we can re-use the location
DROP TABLESPACE regress_tblspacewith;
-- create a tablespace we can use
-CREATE TABLESPACE regress_tblspace LOCATION '@testtablespace@';
+CREATE TABLESPACE regress_tblspace LOCATION :'testtablespace';
-- try setting and resetting some properties for the new tablespace
ALTER TABLESPACE regress_tblspace SET (random_page_cost = 1.0, seq_page_cost = 1.1);
ALTER TABLESPACE regress_tblspace SET (some_nonexistent_parameter = true); -- fail
diff --git a/src/test/regress/pg_regress.c b/src/test/regress/pg_regress.c
index 2c8a600bad1..c2fcff55bfb 100644
--- a/src/test/regress/pg_regress.c
+++ b/src/test/regress/pg_regress.c
@@ -746,6 +746,14 @@ initialize_environment(void)
*/
setenv("PGAPPNAME", "pg_regress", 1);
+ /*
+ * Set variables that the test scripts may need to refer to.
+ */
+ setenv("PG_ABS_SRCDIR", inputdir, 1);
+ setenv("PG_ABS_BUILDDIR", outputdir, 1);
+ setenv("PG_LIBDIR", dlpath, 1);
+ setenv("PG_DLSUFFIX", DLSUFFIX, 1);
+
if (nolocale)
{
/*