Mirror of https://github.com/postgres/postgres.git (synced 2025-07-22 00:01:40 -04:00)

Compare commits: 2c7c6c417f ... c2bdd2c5b1 (11 commits)
Commits in this comparison:

c2bdd2c5b1
ac7d6f5f83
18b585155a
3b0776fde5
721856ff24
b72de09a1b
3a9b18b309
526fe0d799
7b5275eec3
6ceec8a1fe
93c85db3b5
@ -41,7 +41,7 @@ install-world-bin-contrib-recurse: install-world-bin-src-recurse

$(call recurse,installdirs uninstall init-po update-po,doc src config)

$(call recurse,distprep coverage,doc src config contrib)
$(call recurse,coverage,doc src config contrib)

# clean, distclean, etc should apply to contrib too, even though
# it's not built by default
@ -53,7 +53,7 @@ clean:

# Important: distclean `src' last, otherwise Makefile.global
# will be gone too soon.
distclean maintainer-clean:
distclean:
$(MAKE) -C doc $@
$(MAKE) -C contrib $@
$(MAKE) -C config $@
@ -109,7 +109,6 @@ distdir:
|| cp "$(top_srcdir)/$$file" "$(distdir)/$$file"; \
fi || exit; \
done
$(MAKE) -C $(distdir) distprep
$(MAKE) -C $(distdir)/doc/src/sgml/ INSTALL
cp $(distdir)/doc/src/sgml/INSTALL $(distdir)/
$(MAKE) -C $(distdir) distclean
@ -122,7 +121,6 @@ distcheck: dist
install_prefix=`cd $(dummy) && pwd`; \
cd $(distdir) \
&& ./configure --prefix="$$install_prefix"
$(MAKE) -C $(distdir) -q distprep
$(MAKE) -C $(distdir)
$(MAKE) -C $(distdir) install
$(MAKE) -C $(distdir) uninstall
@ -13,19 +13,14 @@ if test "$PERL"; then
if echo "$pgac_perl_version" | sed ['s/[.a-z_]/ /g'] | \
$AWK '{ if ([$]1 == 5 && ([$]2 >= 14)) exit 1; else exit 0;}'
then
AC_MSG_WARN([
AC_MSG_ERROR([
*** The installed version of Perl, $PERL, is too old to use with PostgreSQL.
*** Perl version 5.14 or later is required, but this is $pgac_perl_version.])
PERL=""
fi
fi

if test -z "$PERL"; then
AC_MSG_WARN([
*** Without Perl you will not be able to build PostgreSQL from Git.
*** You can obtain Perl from any CPAN mirror site.
*** (If you are using the official distribution of PostgreSQL then you do not
*** need to worry about this, because the Perl output is pre-generated.)])
AC_MSG_ERROR([Perl not found])
fi
])# PGAC_PATH_PERL

@ -33,10 +33,9 @@ if test "$BISON"; then
AC_MSG_NOTICE([using $pgac_bison_version])
if echo "$pgac_bison_version" | $AWK '{ if ([$]4 < 2.3) exit 0; else exit 1;}'
then
AC_MSG_WARN([
AC_MSG_ERROR([
*** The installed version of Bison, $BISON, is too old to use with PostgreSQL.
*** Bison version 2.3 or later is required, but this is $pgac_bison_version.])
BISON=""
fi
# Bison >=3.0 issues warnings about %name-prefix="base_yy", instead
# of the now preferred %name-prefix "base_yy", but the latter
@ -49,12 +48,7 @@ if test "$BISON"; then
fi

if test -z "$BISON"; then
AC_MSG_WARN([
*** Without Bison you will not be able to build PostgreSQL from Git nor
*** change any of the parser definition files. You can obtain Bison from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this, because the Bison
*** output is pre-generated.)])
AC_MSG_ERROR([bison not found])
fi
dnl We don't need AC_SUBST(BISON) because PGAC_PATH_PROGS did it
AC_SUBST(BISONFLAGS)
@ -95,7 +89,7 @@ else
pgac_cv_path_flex=$pgac_candidate
break 2
else
AC_MSG_WARN([
AC_MSG_ERROR([
*** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL.
*** Flex version 2.5.35 or later is required, but this is $pgac_flex_version.])
fi
@ -109,14 +103,7 @@ fi
])[]dnl AC_CACHE_CHECK

if test x"$pgac_cv_path_flex" = x"no"; then
AC_MSG_WARN([
*** Without Flex you will not be able to build PostgreSQL from Git nor
*** change any of the scanner definition files. You can obtain Flex from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this because the Flex
*** output is pre-generated.)])

FLEX=
AC_MSG_ERROR([flex not found])
else
FLEX=$pgac_cv_path_flex
pgac_flex_version=`$FLEX --version 2>/dev/null`
configure (vendored, 62 lines changed)
@ -10162,13 +10162,9 @@ if test "$BISON"; then
$as_echo "$as_me: using $pgac_bison_version" >&6;}
if echo "$pgac_bison_version" | $AWK '{ if ($4 < 2.3) exit 0; else exit 1;}'
then
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
as_fn_error $? "
*** The installed version of Bison, $BISON, is too old to use with PostgreSQL.
*** Bison version 2.3 or later is required, but this is $pgac_bison_version." >&5
$as_echo "$as_me: WARNING:
*** The installed version of Bison, $BISON, is too old to use with PostgreSQL.
*** Bison version 2.3 or later is required, but this is $pgac_bison_version." >&2;}
BISON=""
*** Bison version 2.3 or later is required, but this is $pgac_bison_version." "$LINENO" 5
fi
# Bison >=3.0 issues warnings about %name-prefix="base_yy", instead
# of the now preferred %name-prefix "base_yy", but the latter
@ -10181,18 +10177,7 @@ $as_echo "$as_me: WARNING:
fi

if test -z "$BISON"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** Without Bison you will not be able to build PostgreSQL from Git nor
*** change any of the parser definition files. You can obtain Bison from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this, because the Bison
*** output is pre-generated.)" >&5
$as_echo "$as_me: WARNING:
*** Without Bison you will not be able to build PostgreSQL from Git nor
*** change any of the parser definition files. You can obtain Bison from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this, because the Bison
*** output is pre-generated.)" >&2;}
as_fn_error $? "bison not found" "$LINENO" 5
fi


@ -10225,12 +10210,9 @@ else
pgac_cv_path_flex=$pgac_candidate
break 2
else
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
as_fn_error $? "
*** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL.
*** Flex version 2.5.35 or later is required, but this is $pgac_flex_version." >&5
$as_echo "$as_me: WARNING:
*** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL.
*** Flex version 2.5.35 or later is required, but this is $pgac_flex_version." >&2;}
*** Flex version 2.5.35 or later is required, but this is $pgac_flex_version." "$LINENO" 5
fi
fi
fi
@ -10244,20 +10226,7 @@ fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $pgac_cv_path_flex" >&5
$as_echo "$pgac_cv_path_flex" >&6; }
if test x"$pgac_cv_path_flex" = x"no"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** Without Flex you will not be able to build PostgreSQL from Git nor
*** change any of the scanner definition files. You can obtain Flex from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this because the Flex
*** output is pre-generated.)" >&5
$as_echo "$as_me: WARNING:
*** Without Flex you will not be able to build PostgreSQL from Git nor
*** change any of the scanner definition files. You can obtain Flex from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this because the Flex
*** output is pre-generated.)" >&2;}

FLEX=
as_fn_error $? "flex not found" "$LINENO" 5
else
FLEX=$pgac_cv_path_flex
pgac_flex_version=`$FLEX --version 2>/dev/null`
@ -10331,27 +10300,14 @@ $as_echo "$as_me: using perl $pgac_perl_version" >&6;}
if echo "$pgac_perl_version" | sed 's/[.a-z_]/ /g' | \
$AWK '{ if ($1 == 5 && ($2 >= 14)) exit 1; else exit 0;}'
then
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
as_fn_error $? "
*** The installed version of Perl, $PERL, is too old to use with PostgreSQL.
*** Perl version 5.14 or later is required, but this is $pgac_perl_version." >&5
$as_echo "$as_me: WARNING:
*** The installed version of Perl, $PERL, is too old to use with PostgreSQL.
*** Perl version 5.14 or later is required, but this is $pgac_perl_version." >&2;}
PERL=""
*** Perl version 5.14 or later is required, but this is $pgac_perl_version." "$LINENO" 5
fi
fi

if test -z "$PERL"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** Without Perl you will not be able to build PostgreSQL from Git.
*** You can obtain Perl from any CPAN mirror site.
*** (If you are using the official distribution of PostgreSQL then you do not
*** need to worry about this, because the Perl output is pre-generated.)" >&5
$as_echo "$as_me: WARNING:
*** Without Perl you will not be able to build PostgreSQL from Git.
*** You can obtain Perl from any CPAN mirror site.
*** (If you are using the official distribution of PostgreSQL then you do not
*** need to worry about this, because the Perl output is pre-generated.)" >&2;}
as_fn_error $? "Perl not found" "$LINENO" 5
fi

if test "$with_perl" = yes; then
@ -18,6 +18,8 @@ REGRESS = cube cube_sci

SHLIB_LINK += $(filter -lm, $(LIBS))

EXTRA_CLEAN = cubeparse.h cubeparse.c cubescan.c

ifdef USE_PGXS
PG_CONFIG = pg_config
PGXS := $(shell $(PG_CONFIG) --pgxs)
@ -37,8 +39,3 @@ cubeparse.c: BISONFLAGS += -d

# Force these dependencies to be known even without dependency info built:
cubeparse.o cubescan.o: cubeparse.h

distprep: cubeparse.c cubescan.c

maintainer-clean:
rm -f cubeparse.h cubeparse.c cubescan.c

@ -15,6 +15,8 @@ PGFILEDESC = "fuzzystrmatch - similarities and distance between strings"

REGRESS = fuzzystrmatch fuzzystrmatch_utf8

EXTRA_CLEAN = daitch_mokotoff.h

ifdef USE_PGXS
PG_CONFIG = pg_config
PGXS := $(shell $(PG_CONFIG) --pgxs)
@ -31,10 +33,3 @@ daitch_mokotoff.o: daitch_mokotoff.h

daitch_mokotoff.h: daitch_mokotoff_header.pl
$(PERL) $< $@

# daitch_mokotoff.h is included in tarballs, so it has to be made by
# "distprep" and not cleaned except by "maintainer-clean".
distprep: daitch_mokotoff.h

maintainer-clean:
rm -f daitch_mokotoff.h

@ -16,7 +16,7 @@ HEADERS = segdata.h

REGRESS = security seg

EXTRA_CLEAN = y.tab.c y.tab.h
EXTRA_CLEAN = segparse.h segparse.c segscan.c

ifdef USE_PGXS
PG_CONFIG = pg_config
@ -38,8 +38,3 @@ segparse.c: BISONFLAGS += -d

# Force these dependencies to be known even without dependency info built:
segparse.o segscan.o: segparse.h

distprep: segparse.c segscan.c

maintainer-clean:
rm -f segparse.h segparse.c segscan.c

@ -12,5 +12,5 @@ subdir = doc
top_builddir = ..
include $(top_builddir)/src/Makefile.global

all distprep html man install installdirs uninstall clean distclean maintainer-clean:
all html man install installdirs uninstall clean distclean:
$(MAKE) -C src $@

@ -4,5 +4,5 @@ subdir = doc/src
top_builddir = ../..
include $(top_builddir)/src/Makefile.global

all distprep html man install installdirs uninstall clean distclean maintainer-clean:
all html man install installdirs uninstall clean distclean:
$(MAKE) -C sgml $@

@ -7,8 +7,7 @@
#----------------------------------------------------------------------------

# This makefile is for building and installing the documentation.
# When a release tarball is created, the documentation files are
# prepared using the distprep target. In Git-based trees these files
# In Git-based trees these files
# don't exist, unless explicitly built, so we skip the installation in
# that case.

@ -28,8 +27,6 @@ include $(top_builddir)/src/Makefile.global

all: html man

distprep: html distprep-man


ifndef DBTOEPUB
DBTOEPUB = $(missing) dbtoepub
@ -77,7 +74,7 @@ postgres-full.xml: postgres.sgml $(ALLSGML)
## Man pages
##

man distprep-man: man-stamp
man: man-stamp

man-stamp: stylesheet-man.xsl postgres-full.xml
$(XSLTPROC) $(XMLINCLUDE) $(XSLTPROCFLAGS) $(XSLTPROC_MAN_FLAGS) $^
@ -89,10 +86,9 @@ man-stamp: stylesheet-man.xsl postgres-full.xml
##

# Technically, this should depend on Makefile.global, but then
# version.sgml would need to be rebuilt after every configure run,
# even in distribution tarballs. So this is cheating a bit, but it
# will achieve the goal of updating the version number when it
# changes.
# version.sgml would need to be rebuilt after every configure run. So
# this is cheating a bit, but it will achieve the goal of updating the
# version number when it changes.
version.sgml: $(top_srcdir)/configure
{ \
echo "<!ENTITY version \"$(VERSION)\">"; \
@ -305,8 +301,6 @@ clean:
rm -f *.texixml *.texi *.info db2texi.refs

distclean: clean

maintainer-clean: distclean
# HTML
rm -fr html/ html-stamp
# man
@ -199,6 +199,15 @@
standard.
</para>
</listitem>

<listitem>
<para>
<productname>PostgreSQL</productname> does not support the
<literal>RETURNING CONTENT</literal> or <literal>RETURNING SEQUENCE</literal>
clauses, functions which are defined to have these in the specification
are implicitly returning content.
</para>
</listitem>
</itemizedlist>
</para>
@ -14180,6 +14180,36 @@ CREATE TYPE rainbow AS ENUM ('red', 'orange', 'yellow', 'green', 'blue', 'purple
documents for processing in client applications.
</para>

<sect3 id="functions-producing-xml-xmltext">
<title><literal>xmltext</literal></title>

<indexterm>
<primary>xmltext</primary>
</indexterm>

<synopsis>
<function>xmltext</function> ( <type>text</type> ) <returnvalue>xml</returnvalue>
</synopsis>

<para>
The function <function>xmltext</function> returns an XML value with a single
text node containing the input argument as its content. Predefined entities
like ampersand (<literal><![CDATA[&]]></literal>), left and right angle brackets
(<literal><![CDATA[< >]]></literal>), and quotation marks (<literal><![CDATA[""]]></literal>)
are escaped.
</para>

<para>
Example:
<screen><![CDATA[
SELECT xmltext('< foo & bar >');
 xmltext
-------------------------
 &lt; foo &amp; bar &gt;
]]></screen>
</para>
</sect3>

<sect3 id="functions-producing-xml-xmlcomment">
<title><literal>xmlcomment</literal></title>
@ -72,8 +72,7 @@ documentation. See standalone-profile.xsl for details.

Alternatively, <productname>PostgreSQL</productname> can be built using
<ulink url="https://mesonbuild.com/">Meson</ulink>. This is currently
experimental and only works when building from a Git checkout (not from
a distribution tarball). If you choose to use
experimental. If you choose to use
<application>Meson</application>, then you don't need
<acronym>GNU</acronym> <application>make</application>, but the other
requirements below still apply.
@ -102,6 +101,41 @@ documentation. See standalone-profile.xsl for details.
</para>
</listitem>

<listitem>
<para>
<indexterm>
<primary>flex</primary>
</indexterm>
<indexterm>
<primary>lex</primary>
</indexterm>
<indexterm>
<primary>bison</primary>
</indexterm>
<indexterm>
<primary>yacc</primary>
</indexterm>

<application>Flex</application> 2.5.35 or later and
<application>Bison</application> 2.3 or later are required. Other
<application>lex</application> and <application>yacc</application>
programs cannot be used.
</para>
</listitem>

<listitem>
<para>
<indexterm>
<primary>perl</primary>
</indexterm>

<application>Perl</application> 5.14 or later is needed during the build
process and to run some test suites. (This requirement is separate from
the requirements for building <application>PL/Perl</application>; see
below.)
</para>
</listitem>

<listitem>
<para>
<indexterm>
@ -315,51 +349,6 @@ documentation. See standalone-profile.xsl for details.
</itemizedlist>
</para>

<para>
If you are building from a <productname>Git</productname> tree instead of
using a released source package, or if you want to do server development,
you also need the following packages:

<itemizedlist>
<listitem>
<para>
<indexterm>
<primary>flex</primary>
</indexterm>
<indexterm>
<primary>lex</primary>
</indexterm>
<indexterm>
<primary>bison</primary>
</indexterm>
<indexterm>
<primary>yacc</primary>
</indexterm>

<application>Flex</application> and <application>Bison</application>
are needed to build from a Git checkout, or if you changed the actual
scanner and parser definition files. If you need them, be sure
to get <application>Flex</application> 2.5.35 or later and
<application>Bison</application> 2.3 or later. Other <application>lex</application>
and <application>yacc</application> programs cannot be used.
</para>
</listitem>
<listitem>
<para>
<indexterm>
<primary>perl</primary>
</indexterm>

<application>Perl</application> 5.14 or later is needed to build from a Git checkout,
or if you changed the input files for any of the build steps that
use Perl scripts. If building on Windows you will need
<application>Perl</application> in any case. <application>Perl</application> is
also required to run some test suites.
</para>
</listitem>
</itemizedlist>
</para>

<para>
If you need to get a <acronym>GNU</acronym> package, you can find
it at your local <acronym>GNU</acronym> mirror site (see <ulink
@ -166,7 +166,8 @@ PostgreSQL documentation
<command>pg_resetwal</command> is unable to determine appropriate values
by reading <filename>pg_control</filename>. Safe values can be determined as
described below. For values that take numeric arguments, hexadecimal
values can be specified by using the prefix <literal>0x</literal>.
values can be specified by using the prefix <literal>0x</literal>. Note
that these instructions only apply with the standard block size of 8 kB.
</para>

<variablelist>
@ -189,6 +190,7 @@ PostgreSQL documentation
greatest file name in the same directory. The file names are in
hexadecimal.
</para>
<!-- XXX: Should there be a multiplier, similar to the other options? -->
</listitem>
</varlistentry>

@ -272,6 +274,7 @@ PostgreSQL documentation
names are in hexadecimal, so the easiest way to do this is to specify
the option value in hexadecimal and append four zeroes.
</para>
<!-- 65536 = SLRU_PAGES_PER_SEGMENT * BLCKSZ / sizeof(MultiXactOffset) -->
</listitem>
</varlistentry>

@ -306,6 +309,7 @@ PostgreSQL documentation
The file names are in hexadecimal. There is no simple recipe such as
the ones for other options of appending zeroes.
</para>
<!-- 52352 = SLRU_PAGES_PER_SEGMENT * floor(BLCKSZ/20) * 4; see multixact.c -->
</listitem>
</varlistentry>

@ -354,6 +358,7 @@ PostgreSQL documentation
in <filename>pg_xact</filename>, <literal>-u 0x700000</literal> will work (five
trailing zeroes provide the proper multiplier).
</para>
<!-- 1048576 = SLRU_PAGES_PER_SEGMENT * BLCKSZ * CLOG_XACTS_PER_BYTE -->
</listitem>
</varlistentry>

@ -375,6 +380,7 @@ PostgreSQL documentation
in <filename>pg_xact</filename>, <literal>-x 0x1200000</literal> will work (five
trailing zeroes provide the proper multiplier).
</para>
<!-- 1048576 = SLRU_PAGES_PER_SEGMENT * BLCKSZ * CLOG_XACTS_PER_BYTE -->
</listitem>
</varlistentry>
</variablelist>
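Editor's note: the new SGML comments above record the per-segment multiplier behind the "append five zeroes" advice. A quick standalone check in C; the constant values here are assumptions chosen to match the comment (SLRU segment size and CLOG packing), not read from the PostgreSQL headers:

#include <stdio.h>

int
main(void)
{
    /* assumed values, consistent with the SGML comment in the diff */
    const long SLRU_PAGES_PER_SEGMENT = 32;
    const long BLCKSZ = 8192;           /* standard 8 kB block size */
    const long CLOG_XACTS_PER_BYTE = 4; /* 2 status bits per transaction */

    /* transaction IDs covered by one pg_xact segment file */
    long xacts_per_segment = SLRU_PAGES_PER_SEGMENT * BLCKSZ * CLOG_XACTS_PER_BYTE;

    /* prints 1048576 = 0x100000: five trailing hex zeroes, hence the recipe */
    printf("%ld = 0x%lX\n", xacts_per_segment, xacts_per_segment);
    return 0;
}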
@ -16,16 +16,6 @@
has some discussion on working with Git.
</para>

<para>
Note that building <productname>PostgreSQL</productname> from the source
repository requires reasonably up-to-date versions of <application>bison</application>,
<application>flex</application>, and <application>Perl</application>.
These tools are not needed to build from a distribution tarball, because
the files generated with these tools are included in the tarball.
Other tool requirements
are the same as shown in <xref linkend="install-requirements"/>.
</para>

<sect1 id="git">
<title>Getting the Source via <productname>Git</productname></title>

@ -72,7 +72,7 @@ configure. Alternatively use a separate check out for meson based builds.
@0@
****'''
if fs.exists(meson.current_source_dir() / 'src' / 'include' / 'pg_config.h')
errmsg_cleanup = 'To clean up, run make maintainer-clean in the source tree.'
errmsg_cleanup = 'To clean up, run make distclean in the source tree.'
error(errmsg_nonclean_base.format(errmsg_cleanup))
endif
@ -59,16 +59,13 @@ uninstall: uninstall-local
uninstall-local:
rm -f $(addprefix '$(DESTDIR)$(pgxsdir)/$(subdir)'/, Makefile.global Makefile.port Makefile.shlib nls-global.mk)

distprep:
$(MAKE) -C test/isolation $@

clean:
$(MAKE) -C test $@
$(MAKE) -C tutorial NO_PGXS=1 $@
$(MAKE) -C test/isolation $@
$(MAKE) -C tools/pg_bsd_indent $@

distclean maintainer-clean:
distclean:
$(MAKE) -C test $@
$(MAKE) -C tutorial NO_PGXS=1 $@
$(MAKE) -C test/isolation $@

@ -19,11 +19,11 @@
#
# Meta configuration

standard_targets = all install installdirs uninstall distprep clean distclean maintainer-clean coverage check checkprep installcheck init-po update-po
standard_targets = all install installdirs uninstall clean distclean coverage check checkprep installcheck init-po update-po
# these targets should recurse even into subdirectories not being built:
standard_always_targets = distprep clean distclean maintainer-clean
standard_always_targets = clean distclean

.PHONY: $(standard_targets) install-strip html man installcheck-parallel update-unicode
.PHONY: $(standard_targets) maintainer-clean install-strip html man installcheck-parallel update-unicode

# make `all' the default target
all:
@ -35,6 +35,8 @@ all:
# Never delete any intermediate files automatically.
.SECONDARY:

maintainer-clean: distclean

# PostgreSQL version number
VERSION = @PACKAGE_VERSION@
MAJORVERSION = @PG_MAJORVERSION@
@ -319,12 +321,8 @@ X = @EXEEXT@

# Perl

ifneq (@PERL@,)
# quoted to protect pathname with spaces
PERL = '@PERL@'
else
PERL = $(missing) perl
endif
# quoted to protect pathname with spaces
PERL = '@PERL@'
perl_archlibexp = @perl_archlibexp@
perl_privlibexp = @perl_privlibexp@
perl_includespec = @perl_includespec@
@ -777,21 +775,13 @@ TAS = @TAS@
# Global targets and rules

%.c: %.l
ifdef FLEX
$(FLEX) $(if $(FLEX_NO_BACKUP),-b) $(FLEXFLAGS) -o'$@' $<
@$(if $(FLEX_NO_BACKUP),if [ `wc -l <lex.backup` -eq 1 ]; then rm lex.backup; else echo "Scanner requires backup; see lex.backup." 1>&2; exit 1; fi)
$(if $(FLEX_FIX_WARNING),$(PERL) $(top_srcdir)/src/tools/fix-old-flex-code.pl '$@')
else
@$(missing) flex $< '$@'
endif

%.c: %.y
$(if $(BISON_CHECK_CMD),$(BISON_CHECK_CMD))
ifdef BISON
$(BISON) $(BISONFLAGS) -o $@ $<
else
@$(missing) bison $< $@
endif

%.i: %.c
$(CPP) $(CPPFLAGS) -o $@ $<
@ -919,7 +909,7 @@ recurse = $(foreach target,$(if $1,$1,$(standard_targets)),$(foreach subdir,$(if
# If a makefile's list of SUBDIRS varies depending on configuration, then
# any subdirectories excluded from SUBDIRS should instead be added to
# ALWAYS_SUBDIRS, and then it must call recurse_always as well as recurse.
# This ensures that distprep, distclean, etc will apply to all subdirectories.
# This ensures that distclean, etc will apply to all subdirectories.
# In the normal case all arguments will be defaulted.
# $1: targets to make recursive (defaults to standard_always_targets)
# $2: list of subdirs (defaults to ALWAYS_SUBDIRS variable)
@ -974,7 +964,7 @@ include $(Po_files)
endif

# hook for clean-up
clean distclean maintainer-clean: clean-deps
clean distclean: clean-deps

.PHONY: clean-deps
clean-deps:
@ -1058,7 +1048,7 @@ lcov_test.info: $(all_gcda_files)


# hook for clean-up
clean distclean maintainer-clean: clean-coverage
clean distclean: clean-coverage

.PHONY: clean-coverage
clean-coverage:
@ -138,42 +138,33 @@ utils/activity/wait_event_types.h: utils/activity/generate-wait_event_types.pl u

# run this unconditionally to avoid needing to know its dependencies here:
submake-catalog-headers:
$(MAKE) -C catalog distprep generated-header-symlinks
$(MAKE) -C catalog generated-header-symlinks

# run this unconditionally to avoid needing to know its dependencies here:
submake-nodes-headers:
$(MAKE) -C nodes distprep generated-header-symlinks
$(MAKE) -C nodes generated-header-symlinks

# run this unconditionally to avoid needing to know its dependencies here:
submake-utils-headers:
$(MAKE) -C utils distprep generated-header-symlinks
$(MAKE) -C utils generated-header-symlinks

.PHONY: submake-catalog-headers submake-nodes-headers submake-utils-headers

# Make symlinks for these headers in the include directory. That way
# we can cut down on the -I options. Also, a symlink is automatically
# up to date when we update the base file.
#
# The point of the prereqdir incantation in some of the rules below is to
# force the symlink to use an absolute path rather than a relative path.
# For headers which are generated by make distprep, the actual header within
# src/backend will be in the source tree, while the symlink in src/include
# will be in the build tree, so a simple ../.. reference won't work.
# For headers generated during regular builds, we prefer a relative symlink.

.PHONY: generated-headers

generated-headers: $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/wait_event_types.h submake-catalog-headers submake-nodes-headers submake-utils-headers
generated-headers: $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/wait_event_types.h submake-catalog-headers submake-nodes-headers submake-utils-headers parser/gram.h

$(top_builddir)/src/include/storage/lwlocknames.h: storage/lmgr/lwlocknames.h
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && rm -f $(notdir $@) && \
$(LN_S) "$$prereqdir/$(notdir $<)" .
rm -f '$@'
$(LN_S) ../../backend/$< '$@'

$(top_builddir)/src/include/utils/wait_event_types.h: utils/activity/wait_event_types.h
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && rm -f $(notdir $@) && \
$(LN_S) "$$prereqdir/$(notdir $<)" .
rm -f '$@'
$(LN_S) ../../backend/$< '$@'

utils/probes.o: utils/probes.d $(SUBDIROBJS)
$(DTRACE) $(DTRACEFLAGS) -C -G -s $(call expand_subsys,$^) -o $@
@ -181,16 +172,12 @@ utils/probes.o: utils/probes.d $(SUBDIROBJS)

##########################################################################

# Be sure that these files get removed by the maintainer-clean target
distprep:
# This target is only needed by nls.mk.
.PHONY: generated-parser-sources
generated-parser-sources:
$(MAKE) -C parser gram.c gram.h scan.c
$(MAKE) -C bootstrap bootparse.c bootparse.h bootscanner.c
$(MAKE) -C catalog distprep
$(MAKE) -C nodes distprep
$(MAKE) -C replication repl_gram.c repl_gram.h repl_scanner.c syncrep_gram.c syncrep_gram.h syncrep_scanner.c
$(MAKE) -C storage/lmgr lwlocknames.h lwlocknames.c
$(MAKE) -C utils distprep
$(MAKE) -C utils/activity wait_event_types.h pgstat_wait_event.c
$(MAKE) -C utils/adt jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c
$(MAKE) -C utils/misc guc-file.c

@ -290,34 +277,9 @@ ifeq ($(PORTNAME), win32)
endif

distclean: clean
# generated by configure
rm -f port/tas.s port/pg_sema.c port/pg_shmem.c

maintainer-clean: distclean
$(MAKE) -C catalog $@
$(MAKE) -C nodes $@
$(MAKE) -C utils $@
rm -f bootstrap/bootparse.c \
bootstrap/bootparse.h \
bootstrap/bootscanner.c \
parser/gram.c \
parser/gram.h \
parser/scan.c \
replication/repl_gram.c \
replication/repl_gram.h \
replication/repl_scanner.c \
replication/syncrep_gram.c \
replication/syncrep_gram.h \
replication/syncrep_scanner.c \
storage/lmgr/lwlocknames.c \
storage/lmgr/lwlocknames.h \
utils/activity/pgstat_wait_event.c \
utils/activity/wait_event_funcs_data.c \
utils/activity/wait_event_types.h \
utils/adt/jsonpath_gram.c \
utils/adt/jsonpath_gram.h \
utils/adt/jsonpath_scan.c \
utils/misc/guc-file.c


##########################################################################
#
@ -28,5 +28,7 @@ bootparse.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built:
bootparse.o bootscanner.o: bootparse.h

# bootparse.c and bootscanner.c are in the distribution tarball, so
# they are not cleaned here.
clean:
rm -f bootparse.c \
bootparse.h \
bootscanner.c

@ -152,9 +152,7 @@ POSTGRES_BKI_DATA = $(addprefix $(top_srcdir)/src/include/catalog/,\
pg_type.dat \
)

all: distprep generated-header-symlinks

distprep: bki-stamp
all: generated-header-symlinks

.PHONY: generated-header-symlinks

@ -173,14 +171,12 @@ bki-stamp: genbki.pl Catalog.pm $(POSTGRES_BKI_SRCS) $(POSTGRES_BKI_DATA) $(top_
--set-version=$(MAJORVERSION) $(POSTGRES_BKI_SRCS)
touch $@

# The generated headers must all be symlinked into builddir/src/include/,
# using absolute links for the reasons explained in src/backend/Makefile.
# The generated headers must all be symlinked into src/include/.
# We use header-stamp to record that we've done this because the symlinks
# themselves may appear older than bki-stamp.
$(top_builddir)/src/include/catalog/header-stamp: bki-stamp
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && for file in $(GENERATED_HEADERS); do \
rm -f $$file && $(LN_S) "$$prereqdir/$$file" . ; \
rm -f $$file && $(LN_S) "../../../$(subdir)/$$file" . ; \
done
touch $@

@ -201,9 +197,5 @@ installdirs:
uninstall-data:
rm -f $(addprefix '$(DESTDIR)$(datadir)'/, postgres.bki system_constraints.sql system_functions.sql system_views.sql information_schema.sql sql_features.txt)

# postgres.bki, system_constraints.sql, and the generated headers are
# in the distribution tarball, so they are not cleaned here.
clean:

maintainer-clean: clean
rm -f bki-stamp postgres.bki system_constraints.sql $(GENERATED_HEADERS)
@ -633,7 +633,7 @@ X034 XMLAgg YES
X035 XMLAgg: ORDER BY option YES
X036 XMLComment YES
X037 XMLPI YES
X038 XMLText NO
X038 XMLText YES supported except for RETURNING
X040 Basic table mapping YES
X041 Basic table mapping: null absent YES
X042 Basic table mapping: null as nil YES
@ -71,6 +71,6 @@ uninstall-types:

include $(top_srcdir)/src/Makefile.shlib

clean distclean maintainer-clean: clean-lib
clean distclean: clean-lib
rm -f $(OBJS)
rm -f llvmjit_types.bc

@ -22,7 +22,7 @@ GETTEXT_FLAGS = $(BACKEND_COMMON_GETTEXT_FLAGS) \
report_invalid_record:2:c-format \
ereport_startup_progress:1:c-format

gettext-files: distprep
gettext-files: generated-parser-sources generated-headers
find $(srcdir) $(srcdir)/../common $(srcdir)/../port -name '*.c' -print | LC_ALL=C sort >$@

my-clean:
@ -65,9 +65,7 @@ node_headers = \

# see also catalog/Makefile for an explanation of these make rules

all: distprep generated-header-symlinks

distprep: node-support-stamp
all: generated-header-symlinks

.PHONY: generated-header-symlinks

@ -81,14 +79,12 @@ node-support-stamp: gen_node_support.pl $(addprefix $(top_srcdir)/src/include/,$
$(PERL) $^
touch $@

# These generated headers must be symlinked into builddir/src/include/,
# using absolute links for the reasons explained in src/backend/Makefile.
# These generated headers must be symlinked into src/include/.
# We use header-stamp to record that we've done this because the symlinks
# themselves may appear older than node-support-stamp.
$(top_builddir)/src/include/nodes/header-stamp: node-support-stamp
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && for file in nodetags.h; do \
rm -f $$file && $(LN_S) "$$prereqdir/$$file" . ; \
rm -f $$file && $(LN_S) "../../../$(subdir)/$$file" . ; \
done
touch $@

@ -98,5 +94,5 @@ outfuncs.o: outfuncs.c outfuncs.funcs.c outfuncs.switch.c | node-support-stamp
queryjumblefuncs.o: queryjumblefuncs.c queryjumblefuncs.funcs.c queryjumblefuncs.switch.c | node-support-stamp
readfuncs.o: readfuncs.c readfuncs.funcs.c readfuncs.switch.c | node-support-stamp

maintainer-clean: clean
clean:
rm -f node-support-stamp $(addsuffix funcs.funcs.c,copy equal out queryjumble read) $(addsuffix funcs.switch.c,copy equal out queryjumble read) nodetags.h
@ -1321,7 +1321,7 @@ innerrel_is_unique_ext(PlannerInfo *root,
* supersets of them anyway.
*/
old_context = MemoryContextSwitchTo(root->planner_cxt);
uniqueRelInfo = palloc(sizeof(UniqueRelInfo));
uniqueRelInfo = makeNode(UniqueRelInfo);
uniqueRelInfo->extra_clauses = outer_exprs;
uniqueRelInfo->outerrelids = bms_copy(outerrelids);
innerrel->unique_for_rels = lappend(innerrel->unique_for_rels,
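Editor's note: the hunk above swaps a bare palloc() for makeNode() when allocating the UniqueRelInfo. As a rough, illustrative sketch (not the actual macro definition, which lives in nodes/nodes.h), makeNode(UniqueRelInfo) behaves approximately like this, so the struct is zeroed and carries a NodeTag that the node support routines (copyObject, equal, outNode) depend on:

/* approximate expansion of "uniqueRelInfo = makeNode(UniqueRelInfo)" */
UniqueRelInfo *uniqueRelInfo = (UniqueRelInfo *) palloc0(sizeof(UniqueRelInfo));

((Node *) uniqueRelInfo)->type = T_UniqueRelInfo;   /* tag assumed to exist for this node type */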
@ -64,8 +64,8 @@ scan.c: FLEX_FIX_WARNING=yes
# Force these dependencies to be known even without dependency info built:
gram.o scan.o parser.o: gram.h


# gram.c, gram.h, and scan.c are in the distribution tarball, so they
# are not cleaned here.
clean distclean maintainer-clean:
clean:
rm -f gram.c \
gram.h \
scan.c
rm -f lex.backup
@ -111,18 +111,6 @@ transformAggregateCall(ParseState *pstate, Aggref *agg,
int save_next_resno;
ListCell *lc;

/*
* Before separating the args into direct and aggregated args, make a list
* of their data type OIDs for use later.
*/
foreach(lc, args)
{
Expr *arg = (Expr *) lfirst(lc);

argtypes = lappend_oid(argtypes, exprType((Node *) arg));
}
agg->aggargtypes = argtypes;

if (AGGKIND_IS_ORDERED_SET(agg->aggkind))
{
/*
@ -234,6 +222,29 @@ transformAggregateCall(ParseState *pstate, Aggref *agg,
agg->aggorder = torder;
agg->aggdistinct = tdistinct;

/*
* Now build the aggargtypes list with the type OIDs of the direct and
* aggregated args, ignoring any resjunk entries that might have been
* added by ORDER BY/DISTINCT processing. We can't do this earlier
* because said processing can modify some args' data types, in particular
* by resolving previously-unresolved "unknown" literals.
*/
foreach(lc, agg->aggdirectargs)
{
Expr *arg = (Expr *) lfirst(lc);

argtypes = lappend_oid(argtypes, exprType((Node *) arg));
}
foreach(lc, tlist)
{
TargetEntry *tle = (TargetEntry *) lfirst(lc);

if (tle->resjunk)
continue; /* ignore junk */
argtypes = lappend_oid(argtypes, exprType((Node *) tle->expr));
}
agg->aggargtypes = argtypes;

check_agglevels_and_constraints(pstate, (Node *) agg);
}
@ -43,6 +43,6 @@ else
$(CC) $(CFLAGS) -c $<
endif

distclean clean:
clean:
rm -f tas_cpp.s
$(MAKE) -C win32 clean
@ -48,6 +48,10 @@ syncrep_gram.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built:
syncrep_gram.o syncrep_scanner.o: syncrep_gram.h

# repl_gram.c, repl_scanner.c, syncrep_gram.c and syncrep_scanner.c
# are in the distribution tarball, so they are not cleaned here.
# (Our parent Makefile takes care of them during maintainer-clean.)
clean:
rm -f repl_gram.c \
repl_gram.h \
repl_scanner.c \
syncrep_gram.c \
syncrep_gram.h \
syncrep_scanner.c
@ -33,5 +33,5 @@ installdirs: installdirs-lib

uninstall: uninstall-lib

clean distclean maintainer-clean: clean-lib
clean distclean: clean-lib
rm -f $(OBJS)
@ -2019,7 +2019,6 @@ void
apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
XLogRecPtr lsn)
{
StringInfoData s2;
int nchanges;
char path[MAXPGPATH];
char *buffer = NULL;
@ -2057,7 +2056,6 @@ apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
CurrentResourceOwner = oldowner;

buffer = palloc(BLCKSZ);
initStringInfo(&s2);

MemoryContextSwitchTo(oldcxt);

@ -2079,6 +2077,7 @@ apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
nchanges = 0;
while (true)
{
StringInfoData s2;
size_t nbytes;
int len;

@ -2104,9 +2103,8 @@ apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,

BufFileTell(stream_fd, &fileno, &offset);

/* copy the buffer to the stringinfo and call apply_dispatch */
resetStringInfo(&s2);
appendBinaryStringInfo(&s2, buffer, len);
/* init a stringinfo using the buffer and call apply_dispatch */
initReadOnlyStringInfo(&s2, buffer, len);

/* Ensure we are reading the data into our memory context. */
oldcxt = MemoryContextSwitchTo(ApplyMessageContext);
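Editor's note: this commit replaces the copy-into-StringInfo pattern with initReadOnlyStringInfo(); the walreceiver hunks further below make the same change. A minimal sketch of the before/after patterns, assuming the StringInfo helpers declared in lib/stringinfo.h (illustrative, not the actual apply_spooled_messages() code):

#include "postgres.h"
#include "lib/stringinfo.h"

static void
dispatch_buffer(char *buf, int len)
{
    StringInfoData s;

    /* Before: allocate a StringInfo buffer and copy the message into it. */
    /*   initStringInfo(&s);                   */
    /*   appendBinaryStringInfo(&s, buf, len); */

    /*
     * After: point the StringInfo directly at the existing buffer.  No
     * palloc or memcpy, but the StringInfo is read-only: it must not be
     * appended to, and buf must stay valid for as long as s is in use.
     */
    initReadOnlyStringInfo(&s, buf, len);

    /* ... then parse via pq_getmsg*()/apply_dispatch(&s) exactly as before ... */
}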
@ -28,5 +28,5 @@ installdirs: installdirs-lib

uninstall: uninstall-lib

clean distclean maintainer-clean: clean-lib
clean distclean: clean-lib
rm -f $(OBJS)
@ -132,7 +132,6 @@ typedef enum WalRcvWakeupReason
static TimestampTz wakeup[NUM_WALRCV_WAKEUPS];

static StringInfoData reply_message;
static StringInfoData incoming_message;

/* Prototypes for private functions */
static void WalRcvFetchTimeLineHistoryFiles(TimeLineID first, TimeLineID last);
@ -425,7 +424,6 @@ WalReceiverMain(void)
/* Initialize LogstreamResult and buffers for processing messages */
LogstreamResult.Write = LogstreamResult.Flush = GetXLogReplayRecPtr(NULL);
initStringInfo(&reply_message);
initStringInfo(&incoming_message);

/* Initialize nap wakeup times. */
now = GetCurrentTimestamp();
@ -843,19 +841,20 @@ XLogWalRcvProcessMsg(unsigned char type, char *buf, Size len, TimeLineID tli)
TimestampTz sendTime;
bool replyRequested;

resetStringInfo(&incoming_message);

switch (type)
{
case 'w': /* WAL records */
{
/* copy message to StringInfo */
StringInfoData incoming_message;

hdrlen = sizeof(int64) + sizeof(int64) + sizeof(int64);
if (len < hdrlen)
ereport(ERROR,
(errcode(ERRCODE_PROTOCOL_VIOLATION),
errmsg_internal("invalid WAL message received from primary")));
appendBinaryStringInfo(&incoming_message, buf, hdrlen);

/* initialize a StringInfo with the given buffer */
initReadOnlyStringInfo(&incoming_message, buf, hdrlen);

/* read the fields */
dataStart = pq_getmsgint64(&incoming_message);
@ -870,13 +869,16 @@ XLogWalRcvProcessMsg(unsigned char type, char *buf, Size len, TimeLineID tli)
}
case 'k': /* Keepalive */
{
/* copy message to StringInfo */
StringInfoData incoming_message;

hdrlen = sizeof(int64) + sizeof(int64) + sizeof(char);
if (len != hdrlen)
ereport(ERROR,
(errcode(ERRCODE_PROTOCOL_VIOLATION),
errmsg_internal("invalid keepalive message received from primary")));
appendBinaryStringInfo(&incoming_message, buf, hdrlen);

/* initialize a StringInfo with the given buffer */
initReadOnlyStringInfo(&incoming_message, buf, hdrlen);

/* read the fields */
walEnd = pq_getmsgint64(&incoming_message);
@ -104,8 +104,6 @@ include $(top_srcdir)/src/Makefile.shlib
$(SQLSCRIPT): snowball_create.pl snowball_func.sql.in snowball.sql.in
$(PERL) $< --input ${srcdir} --outdir .

distprep: $(SQLSCRIPT)

install: all installdirs install-lib install-script
$(INSTALL_DATA) $(addprefix $(srcdir)/stopwords/,$(stop_files)) '$(DESTDIR)$(datadir)/$(DICTDIR)'

@ -122,6 +120,4 @@ uninstall: uninstall-lib

clean distclean: clean-lib
rm -f $(OBJS)

maintainer-clean: distclean
rm -f $(SQLSCRIPT)
@ -74,8 +74,13 @@ pg_signal_backend(int pid, int sig)
return SIGNAL_BACKEND_ERROR;
}

/* Only allow superusers to signal superuser-owned backends. */
if (superuser_arg(proc->roleId) && !superuser())
/*
* Only allow superusers to signal superuser-owned backends. Any process
* not advertising a role might have the importance of a superuser-owned
* backend, so treat it that way.
*/
if ((!OidIsValid(proc->roleId) || superuser_arg(proc->roleId)) &&
!superuser())
return SIGNAL_BACKEND_NOSUPERUSER;

/* Users can signal backends they have role membership in. */
@ -45,8 +45,6 @@ lwlocknames.h: $(top_srcdir)/src/backend/storage/lmgr/lwlocknames.txt generate-l
check: s_lock_test
./s_lock_test

clean distclean:
clean:
rm -f s_lock_test

maintainer-clean: clean
rm -f lwlocknames.h lwlocknames.c
@ -34,13 +34,14 @@ catalogdir = $(top_srcdir)/src/backend/catalog

include $(top_srcdir)/src/backend/common.mk

all: distprep probes.h generated-header-symlinks
all: probes.h generated-header-symlinks

distprep: fmgr-stamp errcodes.h
.PHONY: generated-header-symlinks submake-adt-headers

.PHONY: generated-header-symlinks
generated-header-symlinks: $(top_builddir)/src/include/utils/header-stamp submake-adt-headers

generated-header-symlinks: $(top_builddir)/src/include/utils/header-stamp $(top_builddir)/src/include/utils/probes.h
submake-adt-headers:
$(MAKE) -C adt jsonpath_gram.h

$(SUBDIRS:%=%-recursive): fmgr-stamp errcodes.h

@ -66,22 +67,15 @@ probes.h: Gen_dummy_probes.sed probes.d
sed -f $^ >$@
endif

# These generated headers must be symlinked into builddir/src/include/,
# using absolute links for the reasons explained in src/backend/Makefile.
# These generated headers must be symlinked into src/include/.
# We use header-stamp to record that we've done this because the symlinks
# themselves may appear older than fmgr-stamp.
$(top_builddir)/src/include/utils/header-stamp: fmgr-stamp errcodes.h
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && for file in fmgroids.h fmgrprotos.h errcodes.h; do \
rm -f $$file && $(LN_S) "$$prereqdir/$$file" . ; \
$(top_builddir)/src/include/utils/header-stamp: fmgr-stamp errcodes.h probes.h
cd '$(dir $@)' && for file in fmgroids.h fmgrprotos.h errcodes.h probes.h; do \
rm -f $$file && $(LN_S) "../../../$(subdir)/$$file" . ; \
done
touch $@

# probes.h is handled differently because it's not in the distribution tarball.
$(top_builddir)/src/include/utils/probes.h: probes.h
cd '$(dir $@)' && rm -f $(notdir $@) && \
$(LN_S) "../../../$(subdir)/probes.h" .

# Recipe for rebuilding the Perl version of Gen_dummy_probes
# Nothing depends on it, so it will never be called unless explicitly requested
# The last two lines of the recipe format the script according to our
@ -104,10 +98,6 @@ installdirs:
uninstall-data:
rm -f $(addprefix '$(DESTDIR)$(datadir)'/, errcodes.txt)

# fmgroids.h, fmgrprotos.h, fmgrtab.c, fmgr-stamp, and errcodes.h are in the
# distribution tarball, so they are not cleaned here.
clean:
rm -f probes.h probes.h.tmp

maintainer-clean: clean
rm -f fmgroids.h fmgrprotos.h fmgrtab.c fmgr-stamp errcodes.h
@ -47,5 +47,5 @@ pgstat_wait_event.c: wait_event_types.h
wait_event_types.h: $(top_srcdir)/src/backend/utils/activity/wait_event_names.txt generate-wait_event_types.pl
$(PERL) $(srcdir)/generate-wait_event_types.pl --code $<

maintainer-clean: clean
clean:
rm -f wait_event_types.h pgstat_wait_event.c wait_event_funcs_data.c
@ -132,10 +132,9 @@ jsonpath_scan.c: FLEX_NO_BACKUP=yes
# Force these dependencies to be known even without dependency info built:
jsonpath_gram.o jsonpath_scan.o: jsonpath_gram.h

# jsonpath_gram.c and jsonpath_scan.c are in the distribution tarball,
# so they are not cleaned here.
clean distclean maintainer-clean:
clean:
rm -f lex.backup
rm -f jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c

like.o: like.c like_match.c
@ -19,6 +19,7 @@

#include "access/htup_details.h"
#include "catalog/pg_type.h"
#include "common/int.h"
#include "funcapi.h"
#include "libpq/pqformat.h"
#include "nodes/nodeFuncs.h"
@ -2326,22 +2327,38 @@ array_set_element(Datum arraydatum,
addedbefore = addedafter = 0;

/*
* Check subscripts
* Check subscripts. We assume the existing subscripts passed
* ArrayCheckBounds, so that dim[i] + lb[i] can be computed without
* overflow. But we must beware of other overflows in our calculations of
* new dim[] values.
*/
if (ndim == 1)
{
if (indx[0] < lb[0])
{
addedbefore = lb[0] - indx[0];
dim[0] += addedbefore;
/* addedbefore = lb[0] - indx[0]; */
/* dim[0] += addedbefore; */
if (pg_sub_s32_overflow(lb[0], indx[0], &addedbefore) ||
pg_add_s32_overflow(dim[0], addedbefore, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
lb[0] = indx[0];
if (addedbefore > 1)
newhasnulls = true; /* will insert nulls */
}
if (indx[0] >= (dim[0] + lb[0]))
{
addedafter = indx[0] - (dim[0] + lb[0]) + 1;
dim[0] += addedafter;
/* addedafter = indx[0] - (dim[0] + lb[0]) + 1; */
/* dim[0] += addedafter; */
if (pg_sub_s32_overflow(indx[0], dim[0] + lb[0], &addedafter) ||
pg_add_s32_overflow(addedafter, 1, &addedafter) ||
pg_add_s32_overflow(dim[0], addedafter, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
if (addedafter > 1)
newhasnulls = true; /* will insert nulls */
}
@ -2587,14 +2604,23 @@ array_set_element_expanded(Datum arraydatum,
addedbefore = addedafter = 0;

/*
* Check subscripts (this logic matches original array_set_element)
* Check subscripts (this logic must match array_set_element). We assume
* the existing subscripts passed ArrayCheckBounds, so that dim[i] + lb[i]
* can be computed without overflow. But we must beware of other
* overflows in our calculations of new dim[] values.
*/
if (ndim == 1)
{
if (indx[0] < lb[0])
{
addedbefore = lb[0] - indx[0];
dim[0] += addedbefore;
/* addedbefore = lb[0] - indx[0]; */
/* dim[0] += addedbefore; */
if (pg_sub_s32_overflow(lb[0], indx[0], &addedbefore) ||
pg_add_s32_overflow(dim[0], addedbefore, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
lb[0] = indx[0];
dimschanged = true;
if (addedbefore > 1)
@ -2602,8 +2628,15 @@ array_set_element_expanded(Datum arraydatum,
}
if (indx[0] >= (dim[0] + lb[0]))
{
addedafter = indx[0] - (dim[0] + lb[0]) + 1;
dim[0] += addedafter;
/* addedafter = indx[0] - (dim[0] + lb[0]) + 1; */
/* dim[0] += addedafter; */
if (pg_sub_s32_overflow(indx[0], dim[0] + lb[0], &addedafter) ||
pg_add_s32_overflow(addedafter, 1, &addedafter) ||
pg_add_s32_overflow(dim[0], addedafter, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
dimschanged = true;
if (addedafter > 1)
newhasnulls = true; /* will insert nulls */
@ -2886,7 +2919,10 @@ array_set_slice(Datum arraydatum,
addedbefore = addedafter = 0;

/*
* Check subscripts
* Check subscripts. We assume the existing subscripts passed
* ArrayCheckBounds, so that dim[i] + lb[i] can be computed without
* overflow. But we must beware of other overflows in our calculations of
* new dim[] values.
*/
if (ndim == 1)
{
@ -2901,18 +2937,31 @@ array_set_slice(Datum arraydatum,
errmsg("upper bound cannot be less than lower bound")));
if (lowerIndx[0] < lb[0])
{
if (upperIndx[0] < lb[0] - 1)
newhasnulls = true; /* will insert nulls */
addedbefore = lb[0] - lowerIndx[0];
dim[0] += addedbefore;
/* addedbefore = lb[0] - lowerIndx[0]; */
/* dim[0] += addedbefore; */
if (pg_sub_s32_overflow(lb[0], lowerIndx[0], &addedbefore) ||
pg_add_s32_overflow(dim[0], addedbefore, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
lb[0] = lowerIndx[0];
if (addedbefore > 1)
newhasnulls = true; /* will insert nulls */
}
if (upperIndx[0] >= (dim[0] + lb[0]))
{
if (lowerIndx[0] > (dim[0] + lb[0]))
/* addedafter = upperIndx[0] - (dim[0] + lb[0]) + 1; */
/* dim[0] += addedafter; */
if (pg_sub_s32_overflow(upperIndx[0], dim[0] + lb[0], &addedafter) ||
pg_add_s32_overflow(addedafter, 1, &addedafter) ||
pg_add_s32_overflow(dim[0], addedafter, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
if (addedafter > 1)
newhasnulls = true; /* will insert nulls */
addedafter = upperIndx[0] - (dim[0] + lb[0]) + 1;
dim[0] += addedafter;
}
}
else
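Editor's note: the hunks above replace unchecked int arithmetic on dim[]/lb[] with overflow-checked helpers. A small sketch of the pattern, assuming pg_add_s32_overflow()/pg_sub_s32_overflow() from common/int.h (they return true when the result does not fit in int32); this is an illustration, not code from the diff:

#include "postgres.h"
#include "common/int.h"

static int32
grow_dimension(int32 dim, int32 lb, int32 indx)
{
    int32 addedbefore;

    /* unchecked version was: addedbefore = lb - indx; dim += addedbefore; */
    if (pg_sub_s32_overflow(lb, indx, &addedbefore) ||
        pg_add_s32_overflow(dim, addedbefore, &dim))
        ereport(ERROR,
                (errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
                 errmsg("array size exceeds the maximum allowed")));
    return dim;
}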
@ -64,10 +64,6 @@ ArrayGetOffset0(int n, const int *tup, const int *scale)
* This must do overflow checking, since it is used to validate that a user
* dimensionality request doesn't overflow what we can handle.
*
* We limit array sizes to at most about a quarter billion elements,
* so that it's not necessary to check for overflow in quite so many
* places --- for instance when palloc'ing Datum arrays.
*
* The multiplication overflow check only works on machines that have int64
* arithmetic, but that is nearly all platforms these days, and doing check
* divides for those that don't seems way too expensive.
@ -88,8 +84,6 @@ ArrayGetNItemsSafe(int ndim, const int *dims, struct Node *escontext)
int32 ret;
int i;

#define MaxArraySize ((Size) (MaxAllocSize / sizeof(Datum)))

if (ndim <= 0)
return 0;
ret = 1;
@ -47,6 +47,7 @@
|
||||
|
||||
#ifdef USE_LIBXML
|
||||
#include <libxml/chvalid.h>
|
||||
#include <libxml/entities.h>
|
||||
#include <libxml/parser.h>
|
||||
#include <libxml/parserInternals.h>
|
||||
#include <libxml/tree.h>
|
||||
@ -513,6 +514,27 @@ xmlcomment(PG_FUNCTION_ARGS)
|
||||
}
|
||||
|
||||
|
||||
Datum
|
||||
xmltext(PG_FUNCTION_ARGS)
|
||||
{
|
||||
#ifdef USE_LIBXML
|
||||
text *arg = PG_GETARG_TEXT_PP(0);
|
||||
text *result;
|
||||
xmlChar *xmlbuf = NULL;
|
||||
|
||||
xmlbuf = xmlEncodeSpecialChars(NULL, xml_text2xmlChar(arg));
|
||||
|
||||
Assert(xmlbuf);
|
||||
|
||||
result = cstring_to_text_with_len((const char *) xmlbuf, xmlStrlen(xmlbuf));
|
||||
xmlFree(xmlbuf);
|
||||
PG_RETURN_XML_P(result);
|
||||
#else
|
||||
NO_XML_SUPPORT();
|
||||
return 0;
|
||||
#endif /* not USE_LIBXML */
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* TODO: xmlconcat needs to merge the notations and unparsed entities
|
||||
|
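For illustration only (not part of the patch): at its core the new xmltext() function hands the input text to libxml2's xmlEncodeSpecialChars(), which escapes the XML metacharacters so the result is a valid text node. A standalone sketch of that call, outside PostgreSQL; build against libxml2 (e.g. with xml2-config --cflags --libs):

/*
 * Demonstrates the escaping xmltext() relies on: '&', '<' and '>' in the
 * input come back as entity references.
 */
#include <stdio.h>
#include <libxml/parser.h>
#include <libxml/entities.h>
#include <libxml/xmlmemory.h>

int
main(void)
{
	xmlChar    *escaped =
		xmlEncodeSpecialChars(NULL, (const xmlChar *) "<foo> & <bar>");

	/* prints: &lt;foo&gt; &amp; &lt;bar&gt; */
	printf("%s\n", (const char *) escaped);
	xmlFree(escaped);
	return 0;
}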
src/backend/utils/cache/inval.c
@ -604,6 +604,97 @@ RegisterSnapshotInvalidation(Oid dbId, Oid relId)
dbId, relId);
}

/*
* PrepareInvalidationState
* Initialize inval data for the current (sub)transaction.
*/
static void
PrepareInvalidationState(void)
{
TransInvalidationInfo *myInfo;

if (transInvalInfo != NULL &&
transInvalInfo->my_level == GetCurrentTransactionNestLevel())
return;

myInfo = (TransInvalidationInfo *)
MemoryContextAllocZero(TopTransactionContext,
sizeof(TransInvalidationInfo));
myInfo->parent = transInvalInfo;
myInfo->my_level = GetCurrentTransactionNestLevel();

/* Now, do we have a previous stack entry? */
if (transInvalInfo != NULL)
{
/* Yes; this one should be for a deeper nesting level. */
Assert(myInfo->my_level > transInvalInfo->my_level);

/*
* The parent (sub)transaction must not have any current (i.e.,
* not-yet-locally-processed) messages. If it did, we'd have a
* semantic problem: the new subtransaction presumably ought not be
* able to see those events yet, but since the CommandCounter is
* linear, that can't work once the subtransaction advances the
* counter. This is a convenient place to check for that, as well as
* being important to keep management of the message arrays simple.
*/
if (NumMessagesInGroup(&transInvalInfo->CurrentCmdInvalidMsgs) != 0)
elog(ERROR, "cannot start a subtransaction when there are unprocessed inval messages");

/*
* MemoryContextAllocZero set firstmsg = nextmsg = 0 in each group,
* which is fine for the first (sub)transaction, but otherwise we need
* to update them to follow whatever is already in the arrays.
*/
SetGroupToFollow(&myInfo->PriorCmdInvalidMsgs,
&transInvalInfo->CurrentCmdInvalidMsgs);
SetGroupToFollow(&myInfo->CurrentCmdInvalidMsgs,
&myInfo->PriorCmdInvalidMsgs);
}
else
{
/*
* Here, we need only clear any array pointers left over from a prior
* transaction.
*/
InvalMessageArrays[CatCacheMsgs].msgs = NULL;
InvalMessageArrays[CatCacheMsgs].maxmsgs = 0;
InvalMessageArrays[RelCacheMsgs].msgs = NULL;
InvalMessageArrays[RelCacheMsgs].maxmsgs = 0;
}

transInvalInfo = myInfo;
}

/* ----------------------------------------------------------------
* public functions
* ----------------------------------------------------------------
*/

void
InvalidateSystemCachesExtended(bool debug_discard)
{
int i;

InvalidateCatalogSnapshot();
ResetCatalogCaches();
RelationCacheInvalidate(debug_discard); /* gets smgr and relmap too */

for (i = 0; i < syscache_callback_count; i++)
{
struct SYSCACHECALLBACK *ccitem = syscache_callback_list + i;

ccitem->function(ccitem->arg, ccitem->id, 0);
}

for (i = 0; i < relcache_callback_count; i++)
{
struct RELCACHECALLBACK *ccitem = relcache_callback_list + i;

ccitem->function(ccitem->arg, InvalidOid);
}
}

/*
* LocalExecuteInvalidationMessage
*
@ -704,36 +795,6 @@ InvalidateSystemCaches(void)
InvalidateSystemCachesExtended(false);
}

void
InvalidateSystemCachesExtended(bool debug_discard)
{
int i;

InvalidateCatalogSnapshot();
ResetCatalogCaches();
RelationCacheInvalidate(debug_discard); /* gets smgr and relmap too */

for (i = 0; i < syscache_callback_count; i++)
{
struct SYSCACHECALLBACK *ccitem = syscache_callback_list + i;

ccitem->function(ccitem->arg, ccitem->id, 0);
}

for (i = 0; i < relcache_callback_count; i++)
{
struct RELCACHECALLBACK *ccitem = relcache_callback_list + i;

ccitem->function(ccitem->arg, InvalidOid);
}
}


/* ----------------------------------------------------------------
* public functions
* ----------------------------------------------------------------
*/

/*
* AcceptInvalidationMessages
* Read and process invalidation messages from the shared invalidation
@ -787,68 +848,6 @@ AcceptInvalidationMessages(void)
#endif
}

/*
* PrepareInvalidationState
* Initialize inval data for the current (sub)transaction.
*/
static void
PrepareInvalidationState(void)
{
TransInvalidationInfo *myInfo;

if (transInvalInfo != NULL &&
transInvalInfo->my_level == GetCurrentTransactionNestLevel())
return;

myInfo = (TransInvalidationInfo *)
MemoryContextAllocZero(TopTransactionContext,
sizeof(TransInvalidationInfo));
myInfo->parent = transInvalInfo;
myInfo->my_level = GetCurrentTransactionNestLevel();

/* Now, do we have a previous stack entry? */
if (transInvalInfo != NULL)
{
/* Yes; this one should be for a deeper nesting level. */
Assert(myInfo->my_level > transInvalInfo->my_level);

/*
* The parent (sub)transaction must not have any current (i.e.,
* not-yet-locally-processed) messages. If it did, we'd have a
* semantic problem: the new subtransaction presumably ought not be
* able to see those events yet, but since the CommandCounter is
* linear, that can't work once the subtransaction advances the
* counter. This is a convenient place to check for that, as well as
* being important to keep management of the message arrays simple.
*/
if (NumMessagesInGroup(&transInvalInfo->CurrentCmdInvalidMsgs) != 0)
elog(ERROR, "cannot start a subtransaction when there are unprocessed inval messages");

/*
* MemoryContextAllocZero set firstmsg = nextmsg = 0 in each group,
* which is fine for the first (sub)transaction, but otherwise we need
* to update them to follow whatever is already in the arrays.
*/
SetGroupToFollow(&myInfo->PriorCmdInvalidMsgs,
&transInvalInfo->CurrentCmdInvalidMsgs);
SetGroupToFollow(&myInfo->CurrentCmdInvalidMsgs,
&myInfo->PriorCmdInvalidMsgs);
}
else
{
/*
* Here, we need only clear any array pointers left over from a prior
* transaction.
*/
InvalMessageArrays[CatCacheMsgs].msgs = NULL;
InvalMessageArrays[CatCacheMsgs].maxmsgs = 0;
InvalMessageArrays[RelCacheMsgs].msgs = NULL;
InvalMessageArrays[RelCacheMsgs].maxmsgs = 0;
}

transInvalInfo = myInfo;
}

/*
* PostPrepare_Inval
* Clean up after successful PREPARE.
@ -842,6 +842,14 @@ InitializeSessionUserIdStandalone(void)

AuthenticatedUserId = BOOTSTRAP_SUPERUSERID;
SetSessionUserId(BOOTSTRAP_SUPERUSERID, true);

/*
* XXX This should set SetConfigOption("session_authorization"), too.
* Since we don't, C code will get NULL, and current_setting() will get an
* empty string.
*/
SetConfigOption("is_superuser", "on",
PGC_INTERNAL, PGC_S_DYNAMIC_DEFAULT);
}

/*

@ -21,5 +21,5 @@ OBJS = \

include $(top_srcdir)/src/backend/common.mk

clean distclean maintainer-clean:
clean distclean:
$(MAKE) -C conversion_procs $@

@ -66,8 +66,6 @@ all: $(MAPS)

distclean: clean
rm -f $(TEXTS)

maintainer-clean: distclean
rm -f $(MAPS)

@ -13,5 +13,5 @@ installdirs: installdirs-lib

uninstall: uninstall-lib

clean distclean maintainer-clean: clean-lib
clean distclean: clean-lib
rm -f $(OBJS)

@ -40,6 +40,5 @@ endif

include $(top_srcdir)/src/backend/common.mk

# Note: guc-file.c is not deleted by 'make clean',
# since we want to ship it in distribution tarballs.
clean:
rm -f guc-file.c

@ -54,7 +54,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/initdb$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f initdb$(X) $(OBJS) localtime.c
rm -rf tmp_check

@ -40,7 +40,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_amcheck$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_amcheck$(X) $(OBJS)
rm -rf tmp_check

@ -25,7 +25,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_archivecleanup$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_archivecleanup$(X) $(OBJS)
rm -rf tmp_check

@ -71,7 +71,7 @@ uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_receivewal$(X)'
rm -f '$(DESTDIR)$(bindir)/pg_recvlogical$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_basebackup$(X) pg_receivewal$(X) pg_recvlogical$(X) \
$(BBOBJS) pg_receivewal.o pg_recvlogical.o \
$(OBJS)

@ -36,7 +36,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_checksums$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_checksums$(X) $(OBJS)
rm -rf tmp_check

@ -33,7 +33,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_config$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_config$(X) $(OBJS)
rm -rf tmp_check

@ -33,7 +33,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_controldata$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_controldata$(X) $(OBJS)
rm -rf tmp_check

@ -42,7 +42,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_ctl$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_ctl$(X) $(OBJS)
rm -rf tmp_check

@ -69,6 +69,6 @@ installcheck:
uninstall:
rm -f $(addprefix '$(DESTDIR)$(bindir)'/, pg_dump$(X) pg_restore$(X) pg_dumpall$(X))

clean distclean maintainer-clean:
clean distclean:
rm -f pg_dump$(X) pg_restore$(X) pg_dumpall$(X) $(OBJS) pg_dump.o common.o pg_dump_sort.o pg_restore.o pg_dumpall.o
rm -rf tmp_check

@ -35,7 +35,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_resetwal$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_resetwal$(X) $(OBJS)
rm -rf tmp_check
@ -14,6 +14,7 @@ program_options_handling_ok('pg_resetwal');

my $node = PostgreSQL::Test::Cluster->new('main');
$node->init;
$node->append_conf('postgresql.conf', 'track_commit_timestamp = on');

command_like([ 'pg_resetwal', '-n', $node->data_dir ],
qr/checkpoint/, 'pg_resetwal -n produces output');
@ -29,4 +30,207 @@ SKIP:
'check PGDATA permissions');
}

command_ok([ 'pg_resetwal', '-D', $node->data_dir ], 'pg_resetwal runs');
$node->start;
is($node->safe_psql("postgres", "SELECT 1;"),
1, 'server running and working after reset');

command_fails_like(
[ 'pg_resetwal', $node->data_dir ],
qr/lock file .* exists/,
'fails if server running');

$node->stop('immediate');
command_fails_like(
[ 'pg_resetwal', $node->data_dir ],
qr/database server was not shut down cleanly/,
'does not run after immediate shutdown');
command_ok(
[ 'pg_resetwal', '-f', $node->data_dir ],
'runs after immediate shutdown with force');
$node->start;
is($node->safe_psql("postgres", "SELECT 1;"),
1, 'server running and working after forced reset');

$node->stop;

# check various command-line handling

# Note: This test intends to check that a nonexistent data directory
# gives a reasonable error message. Because of the way the code is
# currently structured, you get an error about readings permissions,
# which is perhaps suboptimal, so feel free to update this test if
# this gets improved.
command_fails_like(
[ 'pg_resetwal', 'foo' ],
qr/error: could not read permissions of directory/,
'fails with nonexistent data directory');

command_fails_like(
[ 'pg_resetwal', 'foo', 'bar' ],
qr/too many command-line arguments/,
'fails with too many command-line arguments');

$ENV{PGDATA} = $node->data_dir; # not used
command_fails_like(
['pg_resetwal'],
qr/no data directory specified/,
'fails with too few command-line arguments');

# error cases
# -c
command_fails_like(
[ 'pg_resetwal', '-c', 'foo', $node->data_dir ],
qr/error: invalid argument for option -c/,
'fails with incorrect -c option');
command_fails_like(
[ 'pg_resetwal', '-c', '10,bar', $node->data_dir ],
qr/error: invalid argument for option -c/,
'fails with incorrect -c option part 2');
command_fails_like(
[ 'pg_resetwal', '-c', '1,10', $node->data_dir ],
qr/greater than/,
'fails with -c value 1 part 1');
command_fails_like(
[ 'pg_resetwal', '-c', '10,1', $node->data_dir ],
qr/greater than/,
'fails with -c value 1 part 2');
# -e
command_fails_like(
[ 'pg_resetwal', '-e', 'foo', $node->data_dir ],
qr/error: invalid argument for option -e/,
'fails with incorrect -e option');
command_fails_like(
[ 'pg_resetwal', '-e', '-1', $node->data_dir ],
qr/must not be -1/,
'fails with -e value -1');
# -l
command_fails_like(
[ 'pg_resetwal', '-l', 'foo', $node->data_dir ],
qr/error: invalid argument for option -l/,
'fails with incorrect -l option');
# -m
command_fails_like(
[ 'pg_resetwal', '-m', 'foo', $node->data_dir ],
qr/error: invalid argument for option -m/,
'fails with incorrect -m option');
command_fails_like(
[ 'pg_resetwal', '-m', '10,bar', $node->data_dir ],
qr/error: invalid argument for option -m/,
'fails with incorrect -m option part 2');
command_fails_like(
[ 'pg_resetwal', '-m', '0,10', $node->data_dir ],
qr/must not be 0/,
'fails with -m value 0 part 1');
command_fails_like(
[ 'pg_resetwal', '-m', '10,0', $node->data_dir ],
qr/must not be 0/,
'fails with -m value 0 part 2');
# -o
command_fails_like(
[ 'pg_resetwal', '-o', 'foo', $node->data_dir ],
qr/error: invalid argument for option -o/,
'fails with incorrect -o option');
command_fails_like(
[ 'pg_resetwal', '-o', '0', $node->data_dir ],
qr/must not be 0/,
'fails with -o value 0');
# -O
command_fails_like(
[ 'pg_resetwal', '-O', 'foo', $node->data_dir ],
qr/error: invalid argument for option -O/,
'fails with incorrect -O option');
command_fails_like(
[ 'pg_resetwal', '-O', '-1', $node->data_dir ],
qr/must not be -1/,
'fails with -O value -1');
# --wal-segsize
command_fails_like(
[ 'pg_resetwal', '--wal-segsize', 'foo', $node->data_dir ],
qr/error: invalid value/,
'fails with incorrect --wal-segsize option');
command_fails_like(
[ 'pg_resetwal', '--wal-segsize', '13', $node->data_dir ],
qr/must be a power/,
'fails with invalid --wal-segsize value');
# -u
command_fails_like(
[ 'pg_resetwal', '-u', 'foo', $node->data_dir ],
qr/error: invalid argument for option -u/,
'fails with incorrect -u option');
command_fails_like(
[ 'pg_resetwal', '-u', '1', $node->data_dir ],
qr/must be greater than/,
'fails with -u value too small');
# -x
command_fails_like(
[ 'pg_resetwal', '-x', 'foo', $node->data_dir ],
qr/error: invalid argument for option -x/,
'fails with incorrect -x option');
command_fails_like(
[ 'pg_resetwal', '-x', '1', $node->data_dir ],
qr/must be greater than/,
'fails with -x value too small');

# run with control override options

my $out = (run_command([ 'pg_resetwal', '-n', $node->data_dir ]))[0];
$out =~ /^Database block size: *(\d+)$/m or die;
my $blcksz = $1;

my @cmd = ('pg_resetwal', '-D', $node->data_dir);

# some not-so-critical hardcoded values
push @cmd, '-e', 1;
push @cmd, '-l', '00000001000000320000004B';
push @cmd, '-o', 100_000;
push @cmd, '--wal-segsize', 1;

# these use the guidance from the documentation

sub get_slru_files
{
opendir(my $dh, $node->data_dir . '/' . $_[0]) or die $!;
my @files = sort grep { /[0-9A-F]+/ } readdir $dh;
closedir $dh;
return @files;
}

my (@files, $mult);

@files = get_slru_files('pg_commit_ts');
# XXX: Should there be a multiplier, similar to the other options?
# -c argument is "old,new"
push @cmd,
'-c',
sprintf("%d,%d", hex($files[0]) == 0 ? 3 : hex($files[0]), hex($files[-1]));

@files = get_slru_files('pg_multixact/offsets');
$mult = 32 * $blcksz / 4;
# -m argument is "new,old"
push @cmd, '-m',
sprintf("%d,%d",
(hex($files[-1]) + 1) * $mult,
hex($files[0]) == 0 ? 1 : hex($files[0] * $mult));

@files = get_slru_files('pg_multixact/members');
$mult = 32 * int($blcksz / 20) * 4;
push @cmd, '-O', (hex($files[-1]) + 1) * $mult;

@files = get_slru_files('pg_xact');
$mult = 32 * $blcksz * 4;
push @cmd,
'-u', (hex($files[0]) == 0 ? 3 : hex($files[0]) * $mult),
'-x', ((hex($files[-1]) + 1) * $mult);

command_ok([ @cmd, '-n' ], 'runs with control override options, dry run');
command_ok(\@cmd, 'runs with control override options');
command_like(
[ 'pg_resetwal', '-n', $node->data_dir ],
qr/^Latest checkpoint's NextOID: *100000$/m,
'spot check that control changes were applied');

$node->start;
ok(1, 'server started after reset');

done_testing();
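For illustration only (not part of the test): the -m, -O and -x arguments above follow the multipliers given in the pg_resetwal documentation, scaled by the block size reported by "pg_resetwal -n". A hedged sketch of that arithmetic, assuming the default 8 kB block size:

/*
 * Sanity-check the SLRU filename multipliers used by the test above,
 * assuming blcksz = 8192 (the default).
 */
#include <stdio.h>

int
main(void)
{
	int			blcksz = 8192;	/* assumed BLCKSZ */

	printf("pg_multixact/offsets multiplier: %d\n", 32 * blcksz / 4);		/* 65536 */
	printf("pg_multixact/members multiplier: %d\n", 32 * (blcksz / 20) * 4);	/* 52352 */
	printf("pg_xact multiplier:              %d\n", 32 * blcksz * 4);		/* 1048576 */
	return 0;
}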
@ -55,4 +55,12 @@ command_checks_all(
],
'processes zero WAL segment size');

# now try to run it
command_fails_like(
[ 'pg_resetwal', $node->data_dir ],
qr/not proceeding because control file values were guessed/,
'does not run when control file values were guessed');
command_ok([ 'pg_resetwal', '-f', $node->data_dir ],
'runs with force when control file values were guessed');

done_testing();

@ -49,7 +49,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_rewind$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_rewind$(X) $(OBJS) xlogreader.c
rm -rf tmp_check

@ -31,6 +31,6 @@ installcheck:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_test_fsync$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_test_fsync$(X) $(OBJS)
rm -rf tmp_check

@ -31,6 +31,6 @@ installcheck:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_test_timing$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_test_timing$(X) $(OBJS)
rm -rf tmp_check

@ -49,7 +49,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_upgrade$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_upgrade$(X) $(OBJS)
rm -rf delete_old_cluster.sh log/ tmp_check/ \
reindex_hash.sql

@ -38,7 +38,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_verifybackup$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_verifybackup$(X) $(OBJS)
rm -rf tmp_check

@ -45,7 +45,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_waldump$(X)'

clean distclean maintainer-clean:
clean distclean:
rm -f pg_waldump$(X) $(OBJS) $(RMGRDESCSOURCES) xlogreader.c xlogstats.c
rm -rf tmp_check

@ -38,8 +38,6 @@ exprparse.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built:
exprparse.o exprscan.o: exprparse.h

distprep: exprparse.c exprscan.c

install: all installdirs
$(INSTALL_PROGRAM) pgbench$(X) '$(DESTDIR)$(bindir)/pgbench$(X)'

@ -52,8 +50,6 @@ uninstall:
clean distclean:
rm -f pgbench$(X) $(OBJS)
rm -rf tmp_check

maintainer-clean: distclean
rm -f exprparse.h exprparse.c exprscan.c

check:

@ -62,8 +62,6 @@ psqlscanslash.c: FLEXFLAGS = -Cfe -p -p
psqlscanslash.c: FLEX_NO_BACKUP=yes
psqlscanslash.c: FLEX_FIX_WARNING=yes

distprep: sql_help.h sql_help.c psqlscanslash.c

install: all installdirs
$(INSTALL_PROGRAM) psql$(X) '$(DESTDIR)$(bindir)/psql$(X)'
$(INSTALL_DATA) $(srcdir)/psqlrc.sample '$(DESTDIR)$(datadir)/psqlrc.sample'
@ -77,10 +75,6 @@ uninstall:
clean distclean:
rm -f psql$(X) $(OBJS) lex.backup
rm -rf tmp_check

# files removed here are supposed to be in the distribution tarball,
# so do not clean them in the clean/distclean rules
maintainer-clean: distclean
rm -f sql_help.h sql_help.c psqlscanslash.c

check:

@ -48,7 +48,7 @@ installdirs:
uninstall:
rm -f $(addprefix '$(DESTDIR)$(bindir)'/, $(addsuffix $(X), $(PROGRAMS)))

clean distclean maintainer-clean:
clean distclean:
rm -f $(addsuffix $(X), $(PROGRAMS)) $(addsuffix .o, $(PROGRAMS))
rm -f common.o $(WIN32RES)
rm -rf tmp_check

@ -123,8 +123,6 @@ GEN_KEYWORDLIST_DEPS = $(TOOLSDIR)/gen_keywordlist.pl $(TOOLSDIR)/PerfectHash.pm

all: libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a

distprep: kwlist_d.h

# libpgcommon is needed by some contrib
install: all installdirs
$(INSTALL_STLIB) libpgcommon.a '$(DESTDIR)$(libdir)/libpgcommon.a'
@ -197,10 +195,7 @@ RYU_OBJS = $(RYU_FILES) $(RYU_FILES:%.o=%_shlib.o) $(RYU_FILES:%.o=%_srv.o)

$(RYU_OBJS): CFLAGS += $(PERMIT_DECLARATION_AFTER_STATEMENT)

# kwlist_d.h is in the distribution tarball, so it is not cleaned here.
clean distclean:
rm -f libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a
rm -f $(OBJS_FRONTEND) $(OBJS_SHLIB) $(OBJS_SRV)

maintainer-clean: distclean
rm -f kwlist_d.h

@ -83,5 +83,3 @@ clean:

distclean: clean
rm -f UnicodeData.txt EastAsianWidth.txt CompositionExclusions.txt NormalizationTest.txt norm_test_table.h unicode_norm_table.h

maintainer-clean: distclean

@ -48,8 +48,6 @@ psqlscan.c: FLEXFLAGS = -Cfe -p -p
psqlscan.c: FLEX_NO_BACKUP=yes
psqlscan.c: FLEX_FIX_WARNING=yes

distprep: psqlscan.c

# libpgfeutils could be useful to contrib, so install it
install: all installdirs
$(INSTALL_STLIB) libpgfeutils.a '$(DESTDIR)$(libdir)/libpgfeutils.a'
@ -62,8 +60,4 @@ uninstall:

clean distclean:
rm -f libpgfeutils.a $(OBJS) lex.backup

# psqlscan.c is supposed to be in the distribution tarball,
# so do not clean it in the clean/distclean rules
maintainer-clean: distclean
rm -f psqlscan.c

@ -78,5 +78,5 @@ clean:
rm -f catalog/pg_*_d.h catalog/header-stamp
rm -f nodes/nodetags.h nodes/header-stamp

distclean maintainer-clean: clean
distclean: clean
rm -f pg_config.h pg_config_ext.h pg_config_os.h stamp-h stamp-ext-h

@ -8793,6 +8793,9 @@
{ oid => '2922', descr => 'serialize an XML value to a character string',
proname => 'text', prorettype => 'text', proargtypes => 'xml',
prosrc => 'xmltotext' },
{ oid => '3813', descr => 'generate XML text node',
proname => 'xmltext', proisstrict => 't', prorettype => 'xml',
proargtypes => 'text', prosrc => 'xmltext' },

{ oid => '2923', descr => 'map table contents to XML',
proname => 'table_to_xml', procost => '100', provolatile => 's',
@ -74,6 +74,13 @@ struct ExprContext;
*/
#define MAXDIM 6

/*
* Maximum number of elements in an array. We limit this to at most about a
* quarter billion elements, so that it's not necessary to check for overflow
* in quite so many places --- for instance when palloc'ing Datum arrays.
*/
#define MaxArraySize ((Size) (MaxAllocSize / sizeof(Datum)))

/*
* Arrays are varlena objects, so must meet the varlena convention that
* the first int32 of the object contains the total object size in bytes.
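For illustration only (not part of the patch): a back-of-the-envelope check of the cap defined above, assuming MaxAllocSize is 0x3fffffff as in src/include/utils/memutils.h.

/*
 * MaxArraySize = MaxAllocSize / sizeof(Datum): about a quarter billion
 * elements with 4-byte Datums, about half that with 8-byte Datums.
 */
#include <stdio.h>
#include <stddef.h>

int
main(void)
{
	size_t		max_alloc_size = 0x3fffffff;	/* assumed MaxAllocSize */

	printf("cap with 4-byte Datum: %zu\n", max_alloc_size / 4);	/* 268435455 */
	printf("cap with 8-byte Datum: %zu\n", max_alloc_size / 8);	/* 134217727 */
	return 0;
}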
@ -23,7 +23,7 @@ install-pgtypeslib-recurse install-ecpglib-recurse install-compatlib-recurse ins
install-compatlib-recurse: install-ecpglib-recurse
install-ecpglib-recurse: install-pgtypeslib-recurse

clean distclean maintainer-clean:
clean distclean:
$(MAKE) -C test clean

checktcp: | temp-install

@ -55,5 +55,3 @@ uninstall: uninstall-lib

clean distclean: clean-lib
rm -f $(OBJS)

maintainer-clean: distclean

@ -66,5 +66,3 @@ uninstall: uninstall-lib

clean distclean: clean-lib
rm -f $(OBJS)

maintainer-clean: distclean

@ -31,5 +31,5 @@ uninstall:
rm -f $(addprefix '$(DESTDIR)$(informix_esql_dir)'/, $(informix_headers))
rm -f '$(DESTDIR)$(includedir)'/$(notdir $(ecpg_config_h))

distclean maintainer-clean:
distclean:
rm -f ecpg_config.h stamp-h

@ -50,5 +50,3 @@ uninstall: uninstall-lib

clean distclean: clean-lib
rm -f $(OBJS)

maintainer-clean: distclean

@ -81,8 +81,6 @@ ecpg_keywords.o: ecpg_kwlist_d.h
c_keywords.o: c_kwlist_d.h
keywords.o: $(top_srcdir)/src/include/parser/kwlist.h

distprep: preproc.y preproc.c preproc.h pgc.c c_kwlist_d.h ecpg_kwlist_d.h

install: all installdirs
$(INSTALL_PROGRAM) ecpg$(X) '$(DESTDIR)$(bindir)'

@ -92,11 +90,7 @@ installdirs:
uninstall:
rm -f '$(DESTDIR)$(bindir)/ecpg$(X)'

# preproc.y, preproc.c, preproc.h, pgc.c, c_kwlist_d.h, and ecpg_kwlist_d.h
# are in the distribution tarball, so they are not cleaned here.
clean distclean:
rm -f *.o ecpg$(X)
rm -f typename.c

maintainer-clean: distclean
rm -f preproc.y preproc.c preproc.h pgc.c c_kwlist_d.h ecpg_kwlist_d.h

@ -21,7 +21,7 @@ else
abs_builddir := $(shell sh -c "pwd -W")
endif

all install installdirs uninstall distprep:
all install installdirs uninstall:
$(MAKE) -C connect $@
$(MAKE) -C sql $@
$(MAKE) -C pgtypeslib $@
@ -30,7 +30,7 @@ all install installdirs uninstall distprep:
$(MAKE) -C compat_oracle $@
$(MAKE) -C thread $@

clean distclean maintainer-clean:
clean distclean:
$(MAKE) -C connect $@
$(MAKE) -C sql $@
$(MAKE) -C pgtypeslib $@

@ -165,6 +165,3 @@ clean distclean: clean-lib
rm -f $(OBJS) pthread.h libpq-refs-stamp
# Might be left over from a Win32 client-only build
rm -f pg_config_paths.h

maintainer-clean: distclean
$(MAKE) -C test $@

@ -20,5 +20,5 @@ all: $(PROGS)

$(PROGS): $(WIN32RES)

clean distclean maintainer-clean:
clean distclean:
rm -f $(PROGS) *.o

@ -391,7 +391,7 @@ ifdef MODULE_big
clean: clean-lib
endif

distclean maintainer-clean: clean
distclean: clean


ifdef REGRESS

@ -171,7 +171,7 @@ all: all-po
install: install-po
installdirs: installdirs-po
uninstall: uninstall-po
clean distclean maintainer-clean: clean-po
clean distclean: clean-po

.PHONY: all-po install-po installdirs-po uninstall-po clean-po \
init-po update-po

@ -127,7 +127,7 @@ installcheck: submake
submake:
$(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X)

clean distclean maintainer-clean: clean-lib
clean distclean: clean-lib
rm -f SPI.c Util.c $(OBJS) perlchunks.h plperl_opmask.h
rm -rf $(pg_regress_clean_files)
ifeq ($(PORTNAME), win32)

@ -104,14 +104,7 @@ submake:
$(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X)


distprep: pl_gram.h pl_gram.c plerrcodes.h pl_reserved_kwlist_d.h pl_unreserved_kwlist_d.h

# pl_gram.c, pl_gram.h, plerrcodes.h, pl_reserved_kwlist_d.h, and
# pl_unreserved_kwlist_d.h are in the distribution tarball, so they
# are not cleaned here.
clean distclean: clean-lib
rm -f $(OBJS)
rm -rf $(pg_regress_clean_files)

maintainer-clean: distclean
rm -f pl_gram.c pl_gram.h plerrcodes.h pl_reserved_kwlist_d.h pl_unreserved_kwlist_d.h

@ -142,6 +142,7 @@ clean distclean: clean-lib
ifeq ($(PORTNAME), win32)
rm -f python${pytverstr}.def
endif
rm -f spiexceptions.h


# Force this dependency to be known even without dependency info built:
@ -149,8 +150,3 @@ plpy_plpymodule.o: spiexceptions.h

spiexceptions.h: $(top_srcdir)/src/backend/utils/errcodes.txt generate-spiexceptions.pl
$(PERL) $(srcdir)/generate-spiexceptions.pl $< > $@

distprep: spiexceptions.h

maintainer-clean: distclean
rm -f spiexceptions.h

@ -63,8 +63,6 @@ pltcl.o: pltclerrcodes.h
pltclerrcodes.h: $(top_srcdir)/src/backend/utils/errcodes.txt generate-pltclerrcodes.pl
$(PERL) $(srcdir)/generate-pltclerrcodes.pl $< > $@

distprep: pltclerrcodes.h

install: all install-lib install-data

installdirs: installdirs-lib
@ -91,13 +89,10 @@ installcheck: submake
submake:
$(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X)

# pltclerrcodes.h is in the distribution tarball, so don't clean it here.
clean distclean: clean-lib
rm -f $(OBJS)
rm -rf $(pg_regress_clean_files)
ifeq ($(PORTNAME), win32)
rm -f $(tclwithver).def
endif

maintainer-clean: distclean
rm -f pltclerrcodes.h

@ -154,6 +154,6 @@ pg_config_paths.h: $(top_builddir)/src/Makefile.global
echo "#define HTMLDIR \"$(htmldir)\"" >>$@
echo "#define MANDIR \"$(mandir)\"" >>$@

clean distclean maintainer-clean:
clean distclean:
rm -f libpgport.a libpgport_shlib.a libpgport_srv.a
rm -f $(OBJS) $(OBJS_SHLIB) $(OBJS_SRV) pg_config_paths.h

@ -19,5 +19,5 @@ check:
installcheck:
$(prove_installcheck)

clean distclean maintainer-clean:
clean distclean:
rm -rf tmp_check

@ -18,5 +18,5 @@ PROGS = testlibpq testlibpq2 testlibpq3 testlibpq4 testlo testlo64

all: $(PROGS)

clean distclean maintainer-clean:
clean distclean:
rm -f $(PROGS) *.o

@ -21,5 +21,5 @@ check:
installcheck:
$(prove_installcheck)

clean distclean maintainer-clean:
clean distclean:
rm -rf tmp_check

@ -43,8 +43,6 @@ pg_isolation_regress$(X): isolation_main.o pg_regress.o $(WIN32RES)
isolationtester$(X): $(OBJS) | submake-libpq submake-libpgport
$(CC) $(CFLAGS) $^ $(libpq_pgport) $(LDFLAGS) $(LDFLAGS_EX) $(LIBS) -o $@

distprep: specparse.c specscanner.c

# See notes in src/backend/parser/Makefile about the following two rules
specparse.h: specparse.c
touch $@
@ -54,14 +52,10 @@ specparse.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built:
specparse.o specscanner.o: specparse.h

# specparse.c and specscanner.c are in the distribution tarball,
# so do not clean them here
clean distclean:
rm -f isolationtester$(X) pg_isolation_regress$(X) $(OBJS) isolation_main.o
rm -f pg_regress.o
rm -rf $(pg_regress_clean_files)

maintainer-clean: distclean
rm -f specparse.h specparse.c specscanner.c

installcheck: all

@ -24,5 +24,5 @@ check:
installcheck:
$(prove_installcheck)

clean distclean maintainer-clean:
clean distclean:
rm -rf tmp_check

@ -21,5 +21,5 @@ check:
installcheck:
$(prove_installcheck)

clean distclean maintainer-clean:
clean distclean:
rm -rf tmp_check

@ -10,7 +10,7 @@ DIRS = de_DE.ISO8859-1 gr_GR.ISO8859-7 koi8-r koi8-to-win1251

all: $(PROGS)

clean distclean maintainer-clean:
clean distclean:
rm -f $(PROGS) *.o
rm -rf tmp_check
for d in $(DIRS); do \

@ -25,5 +25,5 @@ check:
installcheck:
$(prove_installcheck)

clean distclean maintainer-clean:
clean distclean:
rm -rf tmp_check
Some files were not shown because too many files have changed in this diff