Compare commits


No commits in common. "c2bdd2c5b1d48a7e39e1a8d5e1d90b731b53c4c9" and "2c7c6c417fe655ab3fd4ca7f68ec22c913a2fe80" have entirely different histories.

117 changed files with 562 additions and 857 deletions

View File

@@ -41,7 +41,7 @@ install-world-bin-contrib-recurse: install-world-bin-src-recurse
 $(call recurse,installdirs uninstall init-po update-po,doc src config)
-$(call recurse,coverage,doc src config contrib)
+$(call recurse,distprep coverage,doc src config contrib)
 # clean, distclean, etc should apply to contrib too, even though
 # it's not built by default
@@ -53,7 +53,7 @@ clean:
 # Important: distclean `src' last, otherwise Makefile.global
 # will be gone too soon.
-distclean:
+distclean maintainer-clean:
 	$(MAKE) -C doc $@
 	$(MAKE) -C contrib $@
 	$(MAKE) -C config $@
@@ -109,6 +109,7 @@ distdir:
 	|| cp "$(top_srcdir)/$$file" "$(distdir)/$$file"; \
 	fi || exit; \
 	done
+	$(MAKE) -C $(distdir) distprep
 	$(MAKE) -C $(distdir)/doc/src/sgml/ INSTALL
 	cp $(distdir)/doc/src/sgml/INSTALL $(distdir)/
 	$(MAKE) -C $(distdir) distclean
@@ -121,6 +122,7 @@ distcheck: dist
 	install_prefix=`cd $(dummy) && pwd`; \
 	cd $(distdir) \
 	&& ./configure --prefix="$$install_prefix"
+	$(MAKE) -C $(distdir) -q distprep
 	$(MAKE) -C $(distdir)
 	$(MAKE) -C $(distdir) install
 	$(MAKE) -C $(distdir) uninstall

View File

@@ -13,14 +13,19 @@ if test "$PERL"; then
   if echo "$pgac_perl_version" | sed ['s/[.a-z_]/ /g'] | \
     $AWK '{ if ([$]1 == 5 && ([$]2 >= 14)) exit 1; else exit 0;}'
   then
-    AC_MSG_ERROR([
+    AC_MSG_WARN([
 *** The installed version of Perl, $PERL, is too old to use with PostgreSQL.
 *** Perl version 5.14 or later is required, but this is $pgac_perl_version.])
+    PERL=""
   fi
 fi
 if test -z "$PERL"; then
-  AC_MSG_ERROR([Perl not found])
+  AC_MSG_WARN([
+*** Without Perl you will not be able to build PostgreSQL from Git.
+*** You can obtain Perl from any CPAN mirror site.
+*** (If you are using the official distribution of PostgreSQL then you do not
+*** need to worry about this, because the Perl output is pre-generated.)])
 fi
 ])# PGAC_PATH_PERL

View File

@@ -33,9 +33,10 @@ if test "$BISON"; then
   AC_MSG_NOTICE([using $pgac_bison_version])
   if echo "$pgac_bison_version" | $AWK '{ if ([$]4 < 2.3) exit 0; else exit 1;}'
   then
-    AC_MSG_ERROR([
+    AC_MSG_WARN([
 *** The installed version of Bison, $BISON, is too old to use with PostgreSQL.
 *** Bison version 2.3 or later is required, but this is $pgac_bison_version.])
+    BISON=""
   fi
   # Bison >=3.0 issues warnings about %name-prefix="base_yy", instead
   # of the now preferred %name-prefix "base_yy", but the latter
@@ -48,7 +49,12 @@ if test "$BISON"; then
 fi
 if test -z "$BISON"; then
-  AC_MSG_ERROR([bison not found])
+  AC_MSG_WARN([
+*** Without Bison you will not be able to build PostgreSQL from Git nor
+*** change any of the parser definition files. You can obtain Bison from
+*** a GNU mirror site. (If you are using the official distribution of
+*** PostgreSQL then you do not need to worry about this, because the Bison
+*** output is pre-generated.)])
 fi
 dnl We don't need AC_SUBST(BISON) because PGAC_PATH_PROGS did it
 AC_SUBST(BISONFLAGS)
@@ -89,7 +95,7 @@ else
       pgac_cv_path_flex=$pgac_candidate
       break 2
     else
-      AC_MSG_ERROR([
+      AC_MSG_WARN([
 *** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL.
 *** Flex version 2.5.35 or later is required, but this is $pgac_flex_version.])
     fi
@@ -103,7 +109,14 @@ fi
 ])[]dnl AC_CACHE_CHECK
 if test x"$pgac_cv_path_flex" = x"no"; then
-  AC_MSG_ERROR([flex not found])
+  AC_MSG_WARN([
+*** Without Flex you will not be able to build PostgreSQL from Git nor
+*** change any of the scanner definition files. You can obtain Flex from
+*** a GNU mirror site. (If you are using the official distribution of
+*** PostgreSQL then you do not need to worry about this because the Flex
+*** output is pre-generated.)])
+  FLEX=
 else
   FLEX=$pgac_cv_path_flex
   pgac_flex_version=`$FLEX --version 2>/dev/null`

configure (vendored)
View File

@ -10162,9 +10162,13 @@ if test "$BISON"; then
$as_echo "$as_me: using $pgac_bison_version" >&6;} $as_echo "$as_me: using $pgac_bison_version" >&6;}
if echo "$pgac_bison_version" | $AWK '{ if ($4 < 2.3) exit 0; else exit 1;}' if echo "$pgac_bison_version" | $AWK '{ if ($4 < 2.3) exit 0; else exit 1;}'
then then
as_fn_error $? " { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** The installed version of Bison, $BISON, is too old to use with PostgreSQL. *** The installed version of Bison, $BISON, is too old to use with PostgreSQL.
*** Bison version 2.3 or later is required, but this is $pgac_bison_version." "$LINENO" 5 *** Bison version 2.3 or later is required, but this is $pgac_bison_version." >&5
$as_echo "$as_me: WARNING:
*** The installed version of Bison, $BISON, is too old to use with PostgreSQL.
*** Bison version 2.3 or later is required, but this is $pgac_bison_version." >&2;}
BISON=""
fi fi
# Bison >=3.0 issues warnings about %name-prefix="base_yy", instead # Bison >=3.0 issues warnings about %name-prefix="base_yy", instead
# of the now preferred %name-prefix "base_yy", but the latter # of the now preferred %name-prefix "base_yy", but the latter
@ -10177,7 +10181,18 @@ $as_echo "$as_me: using $pgac_bison_version" >&6;}
fi fi
if test -z "$BISON"; then if test -z "$BISON"; then
as_fn_error $? "bison not found" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** Without Bison you will not be able to build PostgreSQL from Git nor
*** change any of the parser definition files. You can obtain Bison from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this, because the Bison
*** output is pre-generated.)" >&5
$as_echo "$as_me: WARNING:
*** Without Bison you will not be able to build PostgreSQL from Git nor
*** change any of the parser definition files. You can obtain Bison from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this, because the Bison
*** output is pre-generated.)" >&2;}
fi fi
@ -10210,9 +10225,12 @@ else
pgac_cv_path_flex=$pgac_candidate pgac_cv_path_flex=$pgac_candidate
break 2 break 2
else else
as_fn_error $? " { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL. *** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL.
*** Flex version 2.5.35 or later is required, but this is $pgac_flex_version." "$LINENO" 5 *** Flex version 2.5.35 or later is required, but this is $pgac_flex_version." >&5
$as_echo "$as_me: WARNING:
*** The installed version of Flex, $pgac_candidate, is too old to use with PostgreSQL.
*** Flex version 2.5.35 or later is required, but this is $pgac_flex_version." >&2;}
fi fi
fi fi
fi fi
@ -10226,7 +10244,20 @@ fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $pgac_cv_path_flex" >&5 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $pgac_cv_path_flex" >&5
$as_echo "$pgac_cv_path_flex" >&6; } $as_echo "$pgac_cv_path_flex" >&6; }
if test x"$pgac_cv_path_flex" = x"no"; then if test x"$pgac_cv_path_flex" = x"no"; then
as_fn_error $? "flex not found" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** Without Flex you will not be able to build PostgreSQL from Git nor
*** change any of the scanner definition files. You can obtain Flex from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this because the Flex
*** output is pre-generated.)" >&5
$as_echo "$as_me: WARNING:
*** Without Flex you will not be able to build PostgreSQL from Git nor
*** change any of the scanner definition files. You can obtain Flex from
*** a GNU mirror site. (If you are using the official distribution of
*** PostgreSQL then you do not need to worry about this because the Flex
*** output is pre-generated.)" >&2;}
FLEX=
else else
FLEX=$pgac_cv_path_flex FLEX=$pgac_cv_path_flex
pgac_flex_version=`$FLEX --version 2>/dev/null` pgac_flex_version=`$FLEX --version 2>/dev/null`
@ -10300,14 +10331,27 @@ $as_echo "$as_me: using perl $pgac_perl_version" >&6;}
if echo "$pgac_perl_version" | sed 's/[.a-z_]/ /g' | \ if echo "$pgac_perl_version" | sed 's/[.a-z_]/ /g' | \
$AWK '{ if ($1 == 5 && ($2 >= 14)) exit 1; else exit 0;}' $AWK '{ if ($1 == 5 && ($2 >= 14)) exit 1; else exit 0;}'
then then
as_fn_error $? " { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** The installed version of Perl, $PERL, is too old to use with PostgreSQL. *** The installed version of Perl, $PERL, is too old to use with PostgreSQL.
*** Perl version 5.14 or later is required, but this is $pgac_perl_version." "$LINENO" 5 *** Perl version 5.14 or later is required, but this is $pgac_perl_version." >&5
$as_echo "$as_me: WARNING:
*** The installed version of Perl, $PERL, is too old to use with PostgreSQL.
*** Perl version 5.14 or later is required, but this is $pgac_perl_version." >&2;}
PERL=""
fi fi
fi fi
if test -z "$PERL"; then if test -z "$PERL"; then
as_fn_error $? "Perl not found" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING:
*** Without Perl you will not be able to build PostgreSQL from Git.
*** You can obtain Perl from any CPAN mirror site.
*** (If you are using the official distribution of PostgreSQL then you do not
*** need to worry about this, because the Perl output is pre-generated.)" >&5
$as_echo "$as_me: WARNING:
*** Without Perl you will not be able to build PostgreSQL from Git.
*** You can obtain Perl from any CPAN mirror site.
*** (If you are using the official distribution of PostgreSQL then you do not
*** need to worry about this, because the Perl output is pre-generated.)" >&2;}
fi fi
if test "$with_perl" = yes; then if test "$with_perl" = yes; then

View File

@ -18,8 +18,6 @@ REGRESS = cube cube_sci
SHLIB_LINK += $(filter -lm, $(LIBS))
EXTRA_CLEAN = cubeparse.h cubeparse.c cubescan.c
ifdef USE_PGXS
PG_CONFIG = pg_config
PGXS := $(shell $(PG_CONFIG) --pgxs)
@ -39,3 +37,8 @@ cubeparse.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built:
cubeparse.o cubescan.o: cubeparse.h
distprep: cubeparse.c cubescan.c
maintainer-clean:
rm -f cubeparse.h cubeparse.c cubescan.c

View File

@ -15,8 +15,6 @@ PGFILEDESC = "fuzzystrmatch - similarities and distance between strings"
REGRESS = fuzzystrmatch fuzzystrmatch_utf8
EXTRA_CLEAN = daitch_mokotoff.h
ifdef USE_PGXS
PG_CONFIG = pg_config
PGXS := $(shell $(PG_CONFIG) --pgxs)
@ -33,3 +31,10 @@ daitch_mokotoff.o: daitch_mokotoff.h
daitch_mokotoff.h: daitch_mokotoff_header.pl
	$(PERL) $< $@
# daitch_mokotoff.h is included in tarballs, so it has to be made by
# "distprep" and not cleaned except by "maintainer-clean".
distprep: daitch_mokotoff.h
maintainer-clean:
rm -f daitch_mokotoff.h

View File

@@ -16,7 +16,7 @@ HEADERS = segdata.h
 REGRESS = security seg
-EXTRA_CLEAN = segparse.h segparse.c segscan.c
+EXTRA_CLEAN = y.tab.c y.tab.h
 ifdef USE_PGXS
 PG_CONFIG = pg_config
@@ -38,3 +38,8 @@ segparse.c: BISONFLAGS += -d
 # Force these dependencies to be known even without dependency info built:
 segparse.o segscan.o: segparse.h
+distprep: segparse.c segscan.c
+maintainer-clean:
+	rm -f segparse.h segparse.c segscan.c

View File

@@ -12,5 +12,5 @@ subdir = doc
 top_builddir = ..
 include $(top_builddir)/src/Makefile.global
-all html man install installdirs uninstall clean distclean:
+all distprep html man install installdirs uninstall clean distclean maintainer-clean:
 	$(MAKE) -C src $@

View File

@@ -4,5 +4,5 @@ subdir = doc/src
 top_builddir = ../..
 include $(top_builddir)/src/Makefile.global
-all html man install installdirs uninstall clean distclean:
+all distprep html man install installdirs uninstall clean distclean maintainer-clean:
 	$(MAKE) -C sgml $@

View File

@@ -7,7 +7,8 @@
 #----------------------------------------------------------------------------
 # This makefile is for building and installing the documentation.
-# In Git-based trees these files
+# When a release tarball is created, the documentation files are
+# prepared using the distprep target. In Git-based trees these files
 # don't exist, unless explicitly built, so we skip the installation in
 # that case.
@@ -27,6 +28,8 @@ include $(top_builddir)/src/Makefile.global
 all: html man
+distprep: html distprep-man
 ifndef DBTOEPUB
 DBTOEPUB = $(missing) dbtoepub
@@ -74,7 +77,7 @@ postgres-full.xml: postgres.sgml $(ALLSGML)
 ## Man pages
 ##
-man: man-stamp
+man distprep-man: man-stamp
 man-stamp: stylesheet-man.xsl postgres-full.xml
 	$(XSLTPROC) $(XMLINCLUDE) $(XSLTPROCFLAGS) $(XSLTPROC_MAN_FLAGS) $^
@@ -86,9 +89,10 @@ man-stamp: stylesheet-man.xsl postgres-full.xml
 ##
 # Technically, this should depend on Makefile.global, but then
-# version.sgml would need to be rebuilt after every configure run. So
-# this is cheating a bit, but it will achieve the goal of updating the
-# version number when it changes.
+# version.sgml would need to be rebuilt after every configure run,
+# even in distribution tarballs. So this is cheating a bit, but it
+# will achieve the goal of updating the version number when it
+# changes.
 version.sgml: $(top_srcdir)/configure
 	{ \
 	  echo "<!ENTITY version \"$(VERSION)\">"; \
@@ -301,6 +305,8 @@ clean:
 	rm -f *.texixml *.texi *.info db2texi.refs
 distclean: clean
+maintainer-clean: distclean
 # HTML
 	rm -fr html/ html-stamp
 # man

View File

@ -199,15 +199,6 @@
standard.
</para>
</listitem>
<listitem>
<para>
<productname>PostgreSQL</productname> does not support the
<literal>RETURNING CONTENT</literal> or <literal>RETURNING SEQUENCE</literal>
clauses, functions which are defined to have these in the specification
are implicitly returning content.
</para>
</listitem>
</itemizedlist>
</para>

View File

@ -14180,36 +14180,6 @@ CREATE TYPE rainbow AS ENUM ('red', 'orange', 'yellow', 'green', 'blue', 'purple
documents for processing in client applications.
</para>
<sect3 id="functions-producing-xml-xmltext">
<title><literal>xmltext</literal></title>
<indexterm>
<primary>xmltext</primary>
</indexterm>
<synopsis>
<function>xmltext</function> ( <type>text</type> ) <returnvalue>xml</returnvalue>
</synopsis>
<para>
The function <function>xmltext</function> returns an XML value with a single
text node containing the input argument as its content. Predefined entities
like ampersand (<literal><![CDATA[&]]></literal>), left and right angle brackets
(<literal><![CDATA[< >]]></literal>), and quotation marks (<literal><![CDATA[""]]></literal>)
are escaped.
</para>
<para>
Example:
<screen><![CDATA[
SELECT xmltext('< foo & bar >');
xmltext
-------------------------
&lt; foo &amp; bar &gt;
]]></screen>
</para>
</sect3>
<sect3 id="functions-producing-xml-xmlcomment"> <sect3 id="functions-producing-xml-xmlcomment">
<title><literal>xmlcomment</literal></title> <title><literal>xmlcomment</literal></title>

View File

@@ -72,7 +72,8 @@ documentation. See standalone-profile.xsl for details.
 Alternatively, <productname>PostgreSQL</productname> can be built using
 <ulink url="https://mesonbuild.com/">Meson</ulink>. This is currently
-experimental. If you choose to use
+experimental and only works when building from a Git checkout (not from
+a distribution tarball). If you choose to use
 <application>Meson</application>, then you don't need
 <acronym>GNU</acronym> <application>make</application>, but the other
 requirements below still apply.
@ -101,41 +102,6 @@ documentation. See standalone-profile.xsl for details.
</para> </para>
</listitem> </listitem>
<listitem>
<para>
<indexterm>
<primary>flex</primary>
</indexterm>
<indexterm>
<primary>lex</primary>
</indexterm>
<indexterm>
<primary>bison</primary>
</indexterm>
<indexterm>
<primary>yacc</primary>
</indexterm>
<application>Flex</application> 2.5.35 or later and
<application>Bison</application> 2.3 or later are required. Other
<application>lex</application> and <application>yacc</application>
programs cannot be used.
</para>
</listitem>
<listitem>
<para>
<indexterm>
<primary>perl</primary>
</indexterm>
<application>Perl</application> 5.14 or later is needed during the build
process and to run some test suites. (This requirement is separate from
the requirements for building <application>PL/Perl</application>; see
below.)
</para>
</listitem>
<listitem>
<para>
<indexterm>
@ -349,6 +315,51 @@ documentation. See standalone-profile.xsl for details.
</itemizedlist>
</para>
<para>
If you are building from a <productname>Git</productname> tree instead of
using a released source package, or if you want to do server development,
you also need the following packages:
<itemizedlist>
<listitem>
<para>
<indexterm>
<primary>flex</primary>
</indexterm>
<indexterm>
<primary>lex</primary>
</indexterm>
<indexterm>
<primary>bison</primary>
</indexterm>
<indexterm>
<primary>yacc</primary>
</indexterm>
<application>Flex</application> and <application>Bison</application>
are needed to build from a Git checkout, or if you changed the actual
scanner and parser definition files. If you need them, be sure
to get <application>Flex</application> 2.5.35 or later and
<application>Bison</application> 2.3 or later. Other <application>lex</application>
and <application>yacc</application> programs cannot be used.
</para>
</listitem>
<listitem>
<para>
<indexterm>
<primary>perl</primary>
</indexterm>
<application>Perl</application> 5.14 or later is needed to build from a Git checkout,
or if you changed the input files for any of the build steps that
use Perl scripts. If building on Windows you will need
<application>Perl</application> in any case. <application>Perl</application> is
also required to run some test suites.
</para>
</listitem>
</itemizedlist>
</para>
<para>
If you need to get a <acronym>GNU</acronym> package, you can find
it at your local <acronym>GNU</acronym> mirror site (see <ulink

View File

@@ -166,8 +166,7 @@ PostgreSQL documentation
 <command>pg_resetwal</command> is unable to determine appropriate values
 by reading <filename>pg_control</filename>. Safe values can be determined as
 described below. For values that take numeric arguments, hexadecimal
-values can be specified by using the prefix <literal>0x</literal>. Note
-that these instructions only apply with the standard block size of 8 kB.
+values can be specified by using the prefix <literal>0x</literal>.
 </para>
<variablelist> <variablelist>
@ -190,7 +189,6 @@ PostgreSQL documentation
greatest file name in the same directory. The file names are in
hexadecimal.
</para>
<!-- XXX: Should there be a multiplier, similar to the other options? -->
</listitem>
</varlistentry>
@ -274,7 +272,6 @@ PostgreSQL documentation
names are in hexadecimal, so the easiest way to do this is to specify
the option value in hexadecimal and append four zeroes.
</para>
<!-- 65536 = SLRU_PAGES_PER_SEGMENT * BLCKSZ / sizeof(MultiXactOffset) -->
</listitem>
</varlistentry>
@ -309,7 +306,6 @@ PostgreSQL documentation
The file names are in hexadecimal. There is no simple recipe such as
the ones for other options of appending zeroes.
</para>
<!-- 52352 = SLRU_PAGES_PER_SEGMENT * floor(BLCKSZ/20) * 4; see multixact.c -->
</listitem>
</varlistentry>
@ -358,7 +354,6 @@ PostgreSQL documentation
in <filename>pg_xact</filename>, <literal>-u 0x700000</literal> will work (five
trailing zeroes provide the proper multiplier).
</para>
<!-- 1048576 = SLRU_PAGES_PER_SEGMENT * BLCKSZ * CLOG_XACTS_PER_BYTE -->
</listitem>
</varlistentry>
@ -380,7 +375,6 @@ PostgreSQL documentation
in <filename>pg_xact</filename>, <literal>-x 0x1200000</literal> will work (five
trailing zeroes provide the proper multiplier).
</para>
<!-- 1048576 = SLRU_PAGES_PER_SEGMENT * BLCKSZ * CLOG_XACTS_PER_BYTE -->
</listitem>
</varlistentry>
</variablelist>

View File

@ -16,6 +16,16 @@
has some discussion on working with Git.
</para>
<para>
Note that building <productname>PostgreSQL</productname> from the source
repository requires reasonably up-to-date versions of <application>bison</application>,
<application>flex</application>, and <application>Perl</application>.
These tools are not needed to build from a distribution tarball, because
the files generated with these tools are included in the tarball.
Other tool requirements
are the same as shown in <xref linkend="install-requirements"/>.
</para>
<sect1 id="git"> <sect1 id="git">
<title>Getting the Source via <productname>Git</productname></title> <title>Getting the Source via <productname>Git</productname></title>

View File

@@ -72,7 +72,7 @@ configure. Alternatively use a separate check out for meson based builds.
 @0@
 ****'''
 if fs.exists(meson.current_source_dir() / 'src' / 'include' / 'pg_config.h')
-  errmsg_cleanup = 'To clean up, run make distclean in the source tree.'
+  errmsg_cleanup = 'To clean up, run make maintainer-clean in the source tree.'
   error(errmsg_nonclean_base.format(errmsg_cleanup))
 endif

View File

@@ -59,13 +59,16 @@ uninstall: uninstall-local
 uninstall-local:
 	rm -f $(addprefix '$(DESTDIR)$(pgxsdir)/$(subdir)'/, Makefile.global Makefile.port Makefile.shlib nls-global.mk)
+distprep:
+	$(MAKE) -C test/isolation $@
 clean:
 	$(MAKE) -C test $@
 	$(MAKE) -C tutorial NO_PGXS=1 $@
 	$(MAKE) -C test/isolation $@
 	$(MAKE) -C tools/pg_bsd_indent $@
-distclean:
+distclean maintainer-clean:
 	$(MAKE) -C test $@
 	$(MAKE) -C tutorial NO_PGXS=1 $@
 	$(MAKE) -C test/isolation $@

View File

@ -19,11 +19,11 @@
# #
# Meta configuration # Meta configuration
standard_targets = all install installdirs uninstall clean distclean coverage check checkprep installcheck init-po update-po standard_targets = all install installdirs uninstall distprep clean distclean maintainer-clean coverage check checkprep installcheck init-po update-po
# these targets should recurse even into subdirectories not being built: # these targets should recurse even into subdirectories not being built:
standard_always_targets = clean distclean standard_always_targets = distprep clean distclean maintainer-clean
.PHONY: $(standard_targets) maintainer-clean install-strip html man installcheck-parallel update-unicode .PHONY: $(standard_targets) install-strip html man installcheck-parallel update-unicode
# make `all' the default target # make `all' the default target
all: all:
@ -35,8 +35,6 @@ all:
# Never delete any intermediate files automatically. # Never delete any intermediate files automatically.
.SECONDARY: .SECONDARY:
maintainer-clean: distclean
# PostgreSQL version number # PostgreSQL version number
VERSION = @PACKAGE_VERSION@ VERSION = @PACKAGE_VERSION@
MAJORVERSION = @PG_MAJORVERSION@ MAJORVERSION = @PG_MAJORVERSION@
@ -321,8 +319,12 @@ X = @EXEEXT@
# Perl # Perl
ifneq (@PERL@,)
# quoted to protect pathname with spaces # quoted to protect pathname with spaces
PERL = '@PERL@' PERL = '@PERL@'
else
PERL = $(missing) perl
endif
perl_archlibexp = @perl_archlibexp@ perl_archlibexp = @perl_archlibexp@
perl_privlibexp = @perl_privlibexp@ perl_privlibexp = @perl_privlibexp@
perl_includespec = @perl_includespec@ perl_includespec = @perl_includespec@
@ -775,13 +777,21 @@ TAS = @TAS@
# Global targets and rules # Global targets and rules
%.c: %.l %.c: %.l
ifdef FLEX
$(FLEX) $(if $(FLEX_NO_BACKUP),-b) $(FLEXFLAGS) -o'$@' $< $(FLEX) $(if $(FLEX_NO_BACKUP),-b) $(FLEXFLAGS) -o'$@' $<
@$(if $(FLEX_NO_BACKUP),if [ `wc -l <lex.backup` -eq 1 ]; then rm lex.backup; else echo "Scanner requires backup; see lex.backup." 1>&2; exit 1; fi) @$(if $(FLEX_NO_BACKUP),if [ `wc -l <lex.backup` -eq 1 ]; then rm lex.backup; else echo "Scanner requires backup; see lex.backup." 1>&2; exit 1; fi)
$(if $(FLEX_FIX_WARNING),$(PERL) $(top_srcdir)/src/tools/fix-old-flex-code.pl '$@') $(if $(FLEX_FIX_WARNING),$(PERL) $(top_srcdir)/src/tools/fix-old-flex-code.pl '$@')
else
@$(missing) flex $< '$@'
endif
%.c: %.y %.c: %.y
$(if $(BISON_CHECK_CMD),$(BISON_CHECK_CMD)) $(if $(BISON_CHECK_CMD),$(BISON_CHECK_CMD))
ifdef BISON
$(BISON) $(BISONFLAGS) -o $@ $< $(BISON) $(BISONFLAGS) -o $@ $<
else
@$(missing) bison $< $@
endif
%.i: %.c %.i: %.c
$(CPP) $(CPPFLAGS) -o $@ $< $(CPP) $(CPPFLAGS) -o $@ $<
@ -909,7 +919,7 @@ recurse = $(foreach target,$(if $1,$1,$(standard_targets)),$(foreach subdir,$(if
# If a makefile's list of SUBDIRS varies depending on configuration, then # If a makefile's list of SUBDIRS varies depending on configuration, then
# any subdirectories excluded from SUBDIRS should instead be added to # any subdirectories excluded from SUBDIRS should instead be added to
# ALWAYS_SUBDIRS, and then it must call recurse_always as well as recurse. # ALWAYS_SUBDIRS, and then it must call recurse_always as well as recurse.
# This ensures that distclean, etc will apply to all subdirectories. # This ensures that distprep, distclean, etc will apply to all subdirectories.
# In the normal case all arguments will be defaulted. # In the normal case all arguments will be defaulted.
# $1: targets to make recursive (defaults to standard_always_targets) # $1: targets to make recursive (defaults to standard_always_targets)
# $2: list of subdirs (defaults to ALWAYS_SUBDIRS variable) # $2: list of subdirs (defaults to ALWAYS_SUBDIRS variable)
@ -964,7 +974,7 @@ include $(Po_files)
endif endif
# hook for clean-up # hook for clean-up
clean distclean: clean-deps clean distclean maintainer-clean: clean-deps
.PHONY: clean-deps .PHONY: clean-deps
clean-deps: clean-deps:
@ -1048,7 +1058,7 @@ lcov_test.info: $(all_gcda_files)
# hook for clean-up # hook for clean-up
clean distclean: clean-coverage clean distclean maintainer-clean: clean-coverage
.PHONY: clean-coverage .PHONY: clean-coverage
clean-coverage: clean-coverage:

View File

@ -138,33 +138,42 @@ utils/activity/wait_event_types.h: utils/activity/generate-wait_event_types.pl u
# run this unconditionally to avoid needing to know its dependencies here: # run this unconditionally to avoid needing to know its dependencies here:
submake-catalog-headers: submake-catalog-headers:
$(MAKE) -C catalog generated-header-symlinks $(MAKE) -C catalog distprep generated-header-symlinks
# run this unconditionally to avoid needing to know its dependencies here: # run this unconditionally to avoid needing to know its dependencies here:
submake-nodes-headers: submake-nodes-headers:
$(MAKE) -C nodes generated-header-symlinks $(MAKE) -C nodes distprep generated-header-symlinks
# run this unconditionally to avoid needing to know its dependencies here: # run this unconditionally to avoid needing to know its dependencies here:
submake-utils-headers: submake-utils-headers:
$(MAKE) -C utils generated-header-symlinks $(MAKE) -C utils distprep generated-header-symlinks
.PHONY: submake-catalog-headers submake-nodes-headers submake-utils-headers .PHONY: submake-catalog-headers submake-nodes-headers submake-utils-headers
# Make symlinks for these headers in the include directory. That way # Make symlinks for these headers in the include directory. That way
# we can cut down on the -I options. Also, a symlink is automatically # we can cut down on the -I options. Also, a symlink is automatically
# up to date when we update the base file. # up to date when we update the base file.
#
# The point of the prereqdir incantation in some of the rules below is to
# force the symlink to use an absolute path rather than a relative path.
# For headers which are generated by make distprep, the actual header within
# src/backend will be in the source tree, while the symlink in src/include
# will be in the build tree, so a simple ../.. reference won't work.
# For headers generated during regular builds, we prefer a relative symlink.
.PHONY: generated-headers .PHONY: generated-headers
generated-headers: $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/wait_event_types.h submake-catalog-headers submake-nodes-headers submake-utils-headers parser/gram.h generated-headers: $(top_builddir)/src/include/storage/lwlocknames.h $(top_builddir)/src/include/utils/wait_event_types.h submake-catalog-headers submake-nodes-headers submake-utils-headers
$(top_builddir)/src/include/storage/lwlocknames.h: storage/lmgr/lwlocknames.h $(top_builddir)/src/include/storage/lwlocknames.h: storage/lmgr/lwlocknames.h
rm -f '$@' prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
$(LN_S) ../../backend/$< '$@' cd '$(dir $@)' && rm -f $(notdir $@) && \
$(LN_S) "$$prereqdir/$(notdir $<)" .
$(top_builddir)/src/include/utils/wait_event_types.h: utils/activity/wait_event_types.h $(top_builddir)/src/include/utils/wait_event_types.h: utils/activity/wait_event_types.h
rm -f '$@' prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
$(LN_S) ../../backend/$< '$@' cd '$(dir $@)' && rm -f $(notdir $@) && \
$(LN_S) "$$prereqdir/$(notdir $<)" .
utils/probes.o: utils/probes.d $(SUBDIROBJS) utils/probes.o: utils/probes.d $(SUBDIROBJS)
$(DTRACE) $(DTRACEFLAGS) -C -G -s $(call expand_subsys,$^) -o $@ $(DTRACE) $(DTRACEFLAGS) -C -G -s $(call expand_subsys,$^) -o $@
@ -172,12 +181,16 @@ utils/probes.o: utils/probes.d $(SUBDIROBJS)
########################################################################## ##########################################################################
# This target is only needed by nls.mk. # Be sure that these files get removed by the maintainer-clean target
.PHONY: generated-parser-sources distprep:
generated-parser-sources:
$(MAKE) -C parser gram.c gram.h scan.c $(MAKE) -C parser gram.c gram.h scan.c
$(MAKE) -C bootstrap bootparse.c bootparse.h bootscanner.c $(MAKE) -C bootstrap bootparse.c bootparse.h bootscanner.c
$(MAKE) -C catalog distprep
$(MAKE) -C nodes distprep
$(MAKE) -C replication repl_gram.c repl_gram.h repl_scanner.c syncrep_gram.c syncrep_gram.h syncrep_scanner.c $(MAKE) -C replication repl_gram.c repl_gram.h repl_scanner.c syncrep_gram.c syncrep_gram.h syncrep_scanner.c
$(MAKE) -C storage/lmgr lwlocknames.h lwlocknames.c
$(MAKE) -C utils distprep
$(MAKE) -C utils/activity wait_event_types.h pgstat_wait_event.c
$(MAKE) -C utils/adt jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c $(MAKE) -C utils/adt jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c
$(MAKE) -C utils/misc guc-file.c $(MAKE) -C utils/misc guc-file.c
@ -277,9 +290,34 @@ ifeq ($(PORTNAME), win32)
endif endif
distclean: clean distclean: clean
# generated by configure
rm -f port/tas.s port/pg_sema.c port/pg_shmem.c rm -f port/tas.s port/pg_sema.c port/pg_shmem.c
maintainer-clean: distclean
$(MAKE) -C catalog $@
$(MAKE) -C nodes $@
$(MAKE) -C utils $@
rm -f bootstrap/bootparse.c \
bootstrap/bootparse.h \
bootstrap/bootscanner.c \
parser/gram.c \
parser/gram.h \
parser/scan.c \
replication/repl_gram.c \
replication/repl_gram.h \
replication/repl_scanner.c \
replication/syncrep_gram.c \
replication/syncrep_gram.h \
replication/syncrep_scanner.c \
storage/lmgr/lwlocknames.c \
storage/lmgr/lwlocknames.h \
utils/activity/pgstat_wait_event.c \
utils/activity/wait_event_funcs_data.c \
utils/activity/wait_event_types.h \
utils/adt/jsonpath_gram.c \
utils/adt/jsonpath_gram.h \
utils/adt/jsonpath_scan.c \
utils/misc/guc-file.c
########################################################################## ##########################################################################
# #

View File

@@ -28,7 +28,5 @@ bootparse.c: BISONFLAGS += -d
 # Force these dependencies to be known even without dependency info built:
 bootparse.o bootscanner.o: bootparse.h
-clean:
-	rm -f bootparse.c \
-	      bootparse.h \
-	      bootscanner.c
+# bootparse.c and bootscanner.c are in the distribution tarball, so
+# they are not cleaned here.

View File

@ -152,7 +152,9 @@ POSTGRES_BKI_DATA = $(addprefix $(top_srcdir)/src/include/catalog/,\
pg_type.dat \ pg_type.dat \
) )
all: generated-header-symlinks all: distprep generated-header-symlinks
distprep: bki-stamp
.PHONY: generated-header-symlinks .PHONY: generated-header-symlinks
@ -171,12 +173,14 @@ bki-stamp: genbki.pl Catalog.pm $(POSTGRES_BKI_SRCS) $(POSTGRES_BKI_DATA) $(top_
--set-version=$(MAJORVERSION) $(POSTGRES_BKI_SRCS) --set-version=$(MAJORVERSION) $(POSTGRES_BKI_SRCS)
touch $@ touch $@
# The generated headers must all be symlinked into src/include/. # The generated headers must all be symlinked into builddir/src/include/,
# using absolute links for the reasons explained in src/backend/Makefile.
# We use header-stamp to record that we've done this because the symlinks # We use header-stamp to record that we've done this because the symlinks
# themselves may appear older than bki-stamp. # themselves may appear older than bki-stamp.
$(top_builddir)/src/include/catalog/header-stamp: bki-stamp $(top_builddir)/src/include/catalog/header-stamp: bki-stamp
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && for file in $(GENERATED_HEADERS); do \ cd '$(dir $@)' && for file in $(GENERATED_HEADERS); do \
rm -f $$file && $(LN_S) "../../../$(subdir)/$$file" . ; \ rm -f $$file && $(LN_S) "$$prereqdir/$$file" . ; \
done done
touch $@ touch $@
@ -197,5 +201,9 @@ installdirs:
uninstall-data: uninstall-data:
rm -f $(addprefix '$(DESTDIR)$(datadir)'/, postgres.bki system_constraints.sql system_functions.sql system_views.sql information_schema.sql sql_features.txt) rm -f $(addprefix '$(DESTDIR)$(datadir)'/, postgres.bki system_constraints.sql system_functions.sql system_views.sql information_schema.sql sql_features.txt)
# postgres.bki, system_constraints.sql, and the generated headers are
# in the distribution tarball, so they are not cleaned here.
clean: clean:
maintainer-clean: clean
rm -f bki-stamp postgres.bki system_constraints.sql $(GENERATED_HEADERS) rm -f bki-stamp postgres.bki system_constraints.sql $(GENERATED_HEADERS)

View File

@@ -633,7 +633,7 @@ X034 XMLAgg YES
 X035 XMLAgg: ORDER BY option YES
 X036 XMLComment YES
 X037 XMLPI YES
-X038 XMLText YES supported except for RETURNING
+X038 XMLText NO
 X040 Basic table mapping YES
 X041 Basic table mapping: null absent YES
 X042 Basic table mapping: null as nil YES

View File

@@ -71,6 +71,6 @@ uninstall-types:
 include $(top_srcdir)/src/Makefile.shlib
-clean distclean: clean-lib
+clean distclean maintainer-clean: clean-lib
 	rm -f $(OBJS)
 	rm -f llvmjit_types.bc

View File

@@ -22,7 +22,7 @@ GETTEXT_FLAGS = $(BACKEND_COMMON_GETTEXT_FLAGS) \
     report_invalid_record:2:c-format \
     ereport_startup_progress:1:c-format
-gettext-files: generated-parser-sources generated-headers
+gettext-files: distprep
 	find $(srcdir) $(srcdir)/../common $(srcdir)/../port -name '*.c' -print | LC_ALL=C sort >$@
 my-clean:

View File

@ -65,7 +65,9 @@ node_headers = \
# see also catalog/Makefile for an explanation of these make rules # see also catalog/Makefile for an explanation of these make rules
all: generated-header-symlinks all: distprep generated-header-symlinks
distprep: node-support-stamp
.PHONY: generated-header-symlinks .PHONY: generated-header-symlinks
@ -79,12 +81,14 @@ node-support-stamp: gen_node_support.pl $(addprefix $(top_srcdir)/src/include/,$
$(PERL) $^ $(PERL) $^
touch $@ touch $@
# These generated headers must be symlinked into src/include/. # These generated headers must be symlinked into builddir/src/include/,
# using absolute links for the reasons explained in src/backend/Makefile.
# We use header-stamp to record that we've done this because the symlinks # We use header-stamp to record that we've done this because the symlinks
# themselves may appear older than node-support-stamp. # themselves may appear older than node-support-stamp.
$(top_builddir)/src/include/nodes/header-stamp: node-support-stamp $(top_builddir)/src/include/nodes/header-stamp: node-support-stamp
prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
cd '$(dir $@)' && for file in nodetags.h; do \ cd '$(dir $@)' && for file in nodetags.h; do \
rm -f $$file && $(LN_S) "../../../$(subdir)/$$file" . ; \ rm -f $$file && $(LN_S) "$$prereqdir/$$file" . ; \
done done
touch $@ touch $@
@ -94,5 +98,5 @@ outfuncs.o: outfuncs.c outfuncs.funcs.c outfuncs.switch.c | node-support-stamp
queryjumblefuncs.o: queryjumblefuncs.c queryjumblefuncs.funcs.c queryjumblefuncs.switch.c | node-support-stamp queryjumblefuncs.o: queryjumblefuncs.c queryjumblefuncs.funcs.c queryjumblefuncs.switch.c | node-support-stamp
readfuncs.o: readfuncs.c readfuncs.funcs.c readfuncs.switch.c | node-support-stamp readfuncs.o: readfuncs.c readfuncs.funcs.c readfuncs.switch.c | node-support-stamp
clean: maintainer-clean: clean
rm -f node-support-stamp $(addsuffix funcs.funcs.c,copy equal out queryjumble read) $(addsuffix funcs.switch.c,copy equal out queryjumble read) nodetags.h rm -f node-support-stamp $(addsuffix funcs.funcs.c,copy equal out queryjumble read) $(addsuffix funcs.switch.c,copy equal out queryjumble read) nodetags.h

View File

@@ -1321,7 +1321,7 @@ innerrel_is_unique_ext(PlannerInfo *root,
 	 * supersets of them anyway.
 	 */
 	old_context = MemoryContextSwitchTo(root->planner_cxt);
-	uniqueRelInfo = makeNode(UniqueRelInfo);
+	uniqueRelInfo = palloc(sizeof(UniqueRelInfo));
 	uniqueRelInfo->extra_clauses = outer_exprs;
 	uniqueRelInfo->outerrelids = bms_copy(outerrelids);
 	innerrel->unique_for_rels = lappend(innerrel->unique_for_rels,
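The only substantive change in this hunk is how the UniqueRelInfo struct is allocated. As a rough, hedged illustration (not part of the diff, and simplified from the usual makeNode() definition in src/include/nodes/nodes.h rather than its literal macro expansion), the two calls differ roughly as follows:

    UniqueRelInfo *a = makeNode(UniqueRelInfo);
    /* roughly: palloc0(sizeof(UniqueRelInfo)) plus a->type = T_UniqueRelInfo,
     * i.e. zeroed memory with a NodeTag set; only valid once UniqueRelInfo is
     * a registered node type. */

    UniqueRelInfo *b = palloc(sizeof(UniqueRelInfo));
    /* uninitialized memory and no NodeTag; the caller must assign every field
     * it relies on, as the following lines of the hunk do. */

So the makeNode() form presupposes that UniqueRelInfo participates in the node infrastructure, while the plain palloc() form does not.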

View File

@@ -64,8 +64,8 @@ scan.c: FLEX_FIX_WARNING=yes
 # Force these dependencies to be known even without dependency info built:
 gram.o scan.o parser.o: gram.h
-clean:
-	rm -f gram.c \
-	      gram.h \
-	      scan.c
+# gram.c, gram.h, and scan.c are in the distribution tarball, so they
+# are not cleaned here.
+clean distclean maintainer-clean:
 	rm -f lex.backup

View File

@ -111,6 +111,18 @@ transformAggregateCall(ParseState *pstate, Aggref *agg,
int save_next_resno; int save_next_resno;
ListCell *lc; ListCell *lc;
/*
* Before separating the args into direct and aggregated args, make a list
* of their data type OIDs for use later.
*/
foreach(lc, args)
{
Expr *arg = (Expr *) lfirst(lc);
argtypes = lappend_oid(argtypes, exprType((Node *) arg));
}
agg->aggargtypes = argtypes;
if (AGGKIND_IS_ORDERED_SET(agg->aggkind)) if (AGGKIND_IS_ORDERED_SET(agg->aggkind))
{ {
/* /*
@ -222,29 +234,6 @@ transformAggregateCall(ParseState *pstate, Aggref *agg,
agg->aggorder = torder; agg->aggorder = torder;
agg->aggdistinct = tdistinct; agg->aggdistinct = tdistinct;
/*
* Now build the aggargtypes list with the type OIDs of the direct and
* aggregated args, ignoring any resjunk entries that might have been
* added by ORDER BY/DISTINCT processing. We can't do this earlier
* because said processing can modify some args' data types, in particular
* by resolving previously-unresolved "unknown" literals.
*/
foreach(lc, agg->aggdirectargs)
{
Expr *arg = (Expr *) lfirst(lc);
argtypes = lappend_oid(argtypes, exprType((Node *) arg));
}
foreach(lc, tlist)
{
TargetEntry *tle = (TargetEntry *) lfirst(lc);
if (tle->resjunk)
continue; /* ignore junk */
argtypes = lappend_oid(argtypes, exprType((Node *) tle->expr));
}
agg->aggargtypes = argtypes;
check_agglevels_and_constraints(pstate, (Node *) agg); check_agglevels_and_constraints(pstate, (Node *) agg);
} }

View File

@@ -43,6 +43,6 @@ else
 	$(CC) $(CFLAGS) -c $<
 endif
-clean:
+distclean clean:
 	rm -f tas_cpp.s
 	$(MAKE) -C win32 clean

View File

@@ -48,10 +48,6 @@ syncrep_gram.c: BISONFLAGS += -d
 # Force these dependencies to be known even without dependency info built:
 syncrep_gram.o syncrep_scanner.o: syncrep_gram.h
-clean:
-	rm -f repl_gram.c \
-	      repl_gram.h \
-	      repl_scanner.c \
-	      syncrep_gram.c \
-	      syncrep_gram.h \
-	      syncrep_scanner.c
+# repl_gram.c, repl_scanner.c, syncrep_gram.c and syncrep_scanner.c
+# are in the distribution tarball, so they are not cleaned here.
+# (Our parent Makefile takes care of them during maintainer-clean.)

View File

@@ -33,5 +33,5 @@ installdirs: installdirs-lib
 uninstall: uninstall-lib
-clean distclean: clean-lib
+clean distclean maintainer-clean: clean-lib
 	rm -f $(OBJS)

View File

@@ -2019,6 +2019,7 @@ void
 apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
                        XLogRecPtr lsn)
 {
+	StringInfoData s2;
 	int         nchanges;
 	char        path[MAXPGPATH];
 	char       *buffer = NULL;
@@ -2056,6 +2057,7 @@ apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
 	CurrentResourceOwner = oldowner;
 	buffer = palloc(BLCKSZ);
+	initStringInfo(&s2);
 	MemoryContextSwitchTo(oldcxt);
@@ -2077,7 +2079,6 @@ apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
 	nchanges = 0;
 	while (true)
 	{
-		StringInfoData s2;
 		size_t      nbytes;
 		int         len;
@@ -2103,8 +2104,9 @@ apply_spooled_messages(FileSet *stream_fileset, TransactionId xid,
 		BufFileTell(stream_fd, &fileno, &offset);
-		/* init a stringinfo using the buffer and call apply_dispatch */
-		initReadOnlyStringInfo(&s2, buffer, len);
+		/* copy the buffer to the stringinfo and call apply_dispatch */
+		resetStringInfo(&s2);
+		appendBinaryStringInfo(&s2, buffer, len);
 		/* Ensure we are reading the data into our memory context. */
 		oldcxt = MemoryContextSwitchTo(ApplyMessageContext);
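Outside the diff itself, here is a hedged sketch of the two buffer-handling styles this hunk switches between, using only routines from src/include/lib/stringinfo.h; the helper names and the buffer/len parameters are mine, and initReadOnlyStringInfo() exists only in branches that provide that API:

    #include "postgres.h"
    #include "lib/stringinfo.h"

    /*
     * Style on the "+" side above: keep one long-lived StringInfo and copy
     * each message into it, so the data is private and NUL-terminated.
     */
    static void
    present_message_by_copying(StringInfo s, const char *buffer, int len)
    {
        resetStringInfo(s);                 /* wipe old contents, keep allocation */
        appendBinaryStringInfo(s, buffer, len);
    }

    /*
     * Style on the "-" side above: point the StringInfo at the existing read
     * buffer.  No copy is made, but the StringInfo must be treated as
     * read-only and the data need not be NUL-terminated.
     */
    static void
    present_message_in_place(StringInfo s, char *buffer, int len)
    {
        initReadOnlyStringInfo(s, buffer, len);
    }

Either way, the resulting StringInfo is what the surrounding loop hands to apply_dispatch().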

View File

@@ -28,5 +28,5 @@ installdirs: installdirs-lib
 uninstall: uninstall-lib
-clean distclean: clean-lib
+clean distclean maintainer-clean: clean-lib
 	rm -f $(OBJS)

View File

@ -132,6 +132,7 @@ typedef enum WalRcvWakeupReason
static TimestampTz wakeup[NUM_WALRCV_WAKEUPS]; static TimestampTz wakeup[NUM_WALRCV_WAKEUPS];
static StringInfoData reply_message; static StringInfoData reply_message;
static StringInfoData incoming_message;
/* Prototypes for private functions */ /* Prototypes for private functions */
static void WalRcvFetchTimeLineHistoryFiles(TimeLineID first, TimeLineID last); static void WalRcvFetchTimeLineHistoryFiles(TimeLineID first, TimeLineID last);
@ -424,6 +425,7 @@ WalReceiverMain(void)
/* Initialize LogstreamResult and buffers for processing messages */ /* Initialize LogstreamResult and buffers for processing messages */
LogstreamResult.Write = LogstreamResult.Flush = GetXLogReplayRecPtr(NULL); LogstreamResult.Write = LogstreamResult.Flush = GetXLogReplayRecPtr(NULL);
initStringInfo(&reply_message); initStringInfo(&reply_message);
initStringInfo(&incoming_message);
/* Initialize nap wakeup times. */ /* Initialize nap wakeup times. */
now = GetCurrentTimestamp(); now = GetCurrentTimestamp();
@ -841,20 +843,19 @@ XLogWalRcvProcessMsg(unsigned char type, char *buf, Size len, TimeLineID tli)
TimestampTz sendTime; TimestampTz sendTime;
bool replyRequested; bool replyRequested;
resetStringInfo(&incoming_message);
switch (type) switch (type)
{ {
case 'w': /* WAL records */ case 'w': /* WAL records */
{ {
StringInfoData incoming_message; /* copy message to StringInfo */
hdrlen = sizeof(int64) + sizeof(int64) + sizeof(int64); hdrlen = sizeof(int64) + sizeof(int64) + sizeof(int64);
if (len < hdrlen) if (len < hdrlen)
ereport(ERROR, ereport(ERROR,
(errcode(ERRCODE_PROTOCOL_VIOLATION), (errcode(ERRCODE_PROTOCOL_VIOLATION),
errmsg_internal("invalid WAL message received from primary"))); errmsg_internal("invalid WAL message received from primary")));
appendBinaryStringInfo(&incoming_message, buf, hdrlen);
/* initialize a StringInfo with the given buffer */
initReadOnlyStringInfo(&incoming_message, buf, hdrlen);
/* read the fields */ /* read the fields */
dataStart = pq_getmsgint64(&incoming_message); dataStart = pq_getmsgint64(&incoming_message);
@ -869,16 +870,13 @@ XLogWalRcvProcessMsg(unsigned char type, char *buf, Size len, TimeLineID tli)
} }
case 'k': /* Keepalive */ case 'k': /* Keepalive */
{ {
StringInfoData incoming_message; /* copy message to StringInfo */
hdrlen = sizeof(int64) + sizeof(int64) + sizeof(char); hdrlen = sizeof(int64) + sizeof(int64) + sizeof(char);
if (len != hdrlen) if (len != hdrlen)
ereport(ERROR, ereport(ERROR,
(errcode(ERRCODE_PROTOCOL_VIOLATION), (errcode(ERRCODE_PROTOCOL_VIOLATION),
errmsg_internal("invalid keepalive message received from primary"))); errmsg_internal("invalid keepalive message received from primary")));
appendBinaryStringInfo(&incoming_message, buf, hdrlen);
/* initialize a StringInfo with the given buffer */
initReadOnlyStringInfo(&incoming_message, buf, hdrlen);
/* read the fields */ /* read the fields */
walEnd = pq_getmsgint64(&incoming_message); walEnd = pq_getmsgint64(&incoming_message);

View File

@ -104,6 +104,8 @@ include $(top_srcdir)/src/Makefile.shlib
$(SQLSCRIPT): snowball_create.pl snowball_func.sql.in snowball.sql.in
	$(PERL) $< --input ${srcdir} --outdir .
distprep: $(SQLSCRIPT)
install: all installdirs install-lib install-script
	$(INSTALL_DATA) $(addprefix $(srcdir)/stopwords/,$(stop_files)) '$(DESTDIR)$(datadir)/$(DICTDIR)'
@ -120,4 +122,6 @@ uninstall: uninstall-lib
clean distclean: clean-lib
	rm -f $(OBJS)
maintainer-clean: distclean
	rm -f $(SQLSCRIPT)

View File

@@ -74,13 +74,8 @@ pg_signal_backend(int pid, int sig)
 		return SIGNAL_BACKEND_ERROR;
 	}
-	/*
-	 * Only allow superusers to signal superuser-owned backends. Any process
-	 * not advertising a role might have the importance of a superuser-owned
-	 * backend, so treat it that way.
-	 */
-	if ((!OidIsValid(proc->roleId) || superuser_arg(proc->roleId)) &&
-		!superuser())
+	/* Only allow superusers to signal superuser-owned backends. */
+	if (superuser_arg(proc->roleId) && !superuser())
 		return SIGNAL_BACKEND_NOSUPERUSER;
 	/* Users can signal backends they have role membership in. */
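As a side note (not part of the diff), the extra clause on the "-" side can be read as a small predicate. The sketch below only uses OidIsValid(), superuser() and superuser_arg(), which exist in PostgreSQL; the helper itself is hypothetical:

    #include "postgres.h"
    #include "miscadmin.h"          /* superuser(), superuser_arg() */

    /*
     * Hypothetical helper restating the "-" side's rule: a backend that
     * advertises no role is treated like a superuser-owned backend, so in
     * both cases only a superuser may send it a signal.
     */
    static bool
    signalling_needs_superuser(Oid backend_role)
    {
        return !OidIsValid(backend_role) || superuser_arg(backend_role);
    }

    /* caller: if (signalling_needs_superuser(proc->roleId) && !superuser()) ... */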

View File

@@ -45,6 +45,8 @@ lwlocknames.h: $(top_srcdir)/src/backend/storage/lmgr/lwlocknames.txt generate-l
 check: s_lock_test
 	./s_lock_test
-clean:
+clean distclean:
 	rm -f s_lock_test
+maintainer-clean: clean
 	rm -f lwlocknames.h lwlocknames.c

View File

@ -34,14 +34,13 @@ catalogdir = $(top_srcdir)/src/backend/catalog
include $(top_srcdir)/src/backend/common.mk include $(top_srcdir)/src/backend/common.mk
all: probes.h generated-header-symlinks all: distprep probes.h generated-header-symlinks
.PHONY: generated-header-symlinks submake-adt-headers distprep: fmgr-stamp errcodes.h
generated-header-symlinks: $(top_builddir)/src/include/utils/header-stamp submake-adt-headers .PHONY: generated-header-symlinks
submake-adt-headers: generated-header-symlinks: $(top_builddir)/src/include/utils/header-stamp $(top_builddir)/src/include/utils/probes.h
$(MAKE) -C adt jsonpath_gram.h
$(SUBDIRS:%=%-recursive): fmgr-stamp errcodes.h $(SUBDIRS:%=%-recursive): fmgr-stamp errcodes.h
@ -67,15 +66,22 @@ probes.h: Gen_dummy_probes.sed probes.d
sed -f $^ >$@ sed -f $^ >$@
endif endif
# These generated headers must be symlinked into src/include/. # These generated headers must be symlinked into builddir/src/include/,
# using absolute links for the reasons explained in src/backend/Makefile.
# We use header-stamp to record that we've done this because the symlinks # We use header-stamp to record that we've done this because the symlinks
# themselves may appear older than fmgr-stamp. # themselves may appear older than fmgr-stamp.
$(top_builddir)/src/include/utils/header-stamp: fmgr-stamp errcodes.h probes.h $(top_builddir)/src/include/utils/header-stamp: fmgr-stamp errcodes.h
cd '$(dir $@)' && for file in fmgroids.h fmgrprotos.h errcodes.h probes.h; do \ prereqdir=`cd '$(dir $<)' >/dev/null && pwd` && \
rm -f $$file && $(LN_S) "../../../$(subdir)/$$file" . ; \ cd '$(dir $@)' && for file in fmgroids.h fmgrprotos.h errcodes.h; do \
rm -f $$file && $(LN_S) "$$prereqdir/$$file" . ; \
done done
touch $@ touch $@
# probes.h is handled differently because it's not in the distribution tarball.
$(top_builddir)/src/include/utils/probes.h: probes.h
cd '$(dir $@)' && rm -f $(notdir $@) && \
$(LN_S) "../../../$(subdir)/probes.h" .
# Recipe for rebuilding the Perl version of Gen_dummy_probes # Recipe for rebuilding the Perl version of Gen_dummy_probes
# Nothing depends on it, so it will never be called unless explicitly requested # Nothing depends on it, so it will never be called unless explicitly requested
# The last two lines of the recipe format the script according to our # The last two lines of the recipe format the script according to our
@ -98,6 +104,10 @@ installdirs:
uninstall-data: uninstall-data:
rm -f $(addprefix '$(DESTDIR)$(datadir)'/, errcodes.txt) rm -f $(addprefix '$(DESTDIR)$(datadir)'/, errcodes.txt)
# fmgroids.h, fmgrprotos.h, fmgrtab.c, fmgr-stamp, and errcodes.h are in the
# distribution tarball, so they are not cleaned here.
clean: clean:
rm -f probes.h probes.h.tmp rm -f probes.h probes.h.tmp
maintainer-clean: clean
rm -f fmgroids.h fmgrprotos.h fmgrtab.c fmgr-stamp errcodes.h rm -f fmgroids.h fmgrprotos.h fmgrtab.c fmgr-stamp errcodes.h

View File

@@ -47,5 +47,5 @@ pgstat_wait_event.c: wait_event_types.h
 wait_event_types.h: $(top_srcdir)/src/backend/utils/activity/wait_event_names.txt generate-wait_event_types.pl
 	$(PERL) $(srcdir)/generate-wait_event_types.pl --code $<
-clean:
+maintainer-clean: clean
 	rm -f wait_event_types.h pgstat_wait_event.c wait_event_funcs_data.c

View File

@@ -132,9 +132,10 @@ jsonpath_scan.c: FLEX_NO_BACKUP=yes
 # Force these dependencies to be known even without dependency info built:
 jsonpath_gram.o jsonpath_scan.o: jsonpath_gram.h
-clean:
+# jsonpath_gram.c and jsonpath_scan.c are in the distribution tarball,
+# so they are not cleaned here.
+clean distclean maintainer-clean:
 	rm -f lex.backup
-	rm -f jsonpath_gram.c jsonpath_gram.h jsonpath_scan.c
 like.o: like.c like_match.c

View File

@ -19,7 +19,6 @@
#include "access/htup_details.h" #include "access/htup_details.h"
#include "catalog/pg_type.h" #include "catalog/pg_type.h"
#include "common/int.h"
#include "funcapi.h" #include "funcapi.h"
#include "libpq/pqformat.h" #include "libpq/pqformat.h"
#include "nodes/nodeFuncs.h" #include "nodes/nodeFuncs.h"
@ -2327,38 +2326,22 @@ array_set_element(Datum arraydatum,
addedbefore = addedafter = 0; addedbefore = addedafter = 0;
/* /*
* Check subscripts. We assume the existing subscripts passed * Check subscripts
* ArrayCheckBounds, so that dim[i] + lb[i] can be computed without
* overflow. But we must beware of other overflows in our calculations of
* new dim[] values.
*/ */
if (ndim == 1) if (ndim == 1)
{ {
if (indx[0] < lb[0]) if (indx[0] < lb[0])
{ {
/* addedbefore = lb[0] - indx[0]; */ addedbefore = lb[0] - indx[0];
/* dim[0] += addedbefore; */ dim[0] += addedbefore;
if (pg_sub_s32_overflow(lb[0], indx[0], &addedbefore) ||
pg_add_s32_overflow(dim[0], addedbefore, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
lb[0] = indx[0]; lb[0] = indx[0];
if (addedbefore > 1) if (addedbefore > 1)
newhasnulls = true; /* will insert nulls */ newhasnulls = true; /* will insert nulls */
} }
if (indx[0] >= (dim[0] + lb[0])) if (indx[0] >= (dim[0] + lb[0]))
{ {
/* addedafter = indx[0] - (dim[0] + lb[0]) + 1; */ addedafter = indx[0] - (dim[0] + lb[0]) + 1;
/* dim[0] += addedafter; */ dim[0] += addedafter;
if (pg_sub_s32_overflow(indx[0], dim[0] + lb[0], &addedafter) ||
pg_add_s32_overflow(addedafter, 1, &addedafter) ||
pg_add_s32_overflow(dim[0], addedafter, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
if (addedafter > 1) if (addedafter > 1)
newhasnulls = true; /* will insert nulls */ newhasnulls = true; /* will insert nulls */
} }
@ -2604,23 +2587,14 @@ array_set_element_expanded(Datum arraydatum,
addedbefore = addedafter = 0; addedbefore = addedafter = 0;
/* /*
* Check subscripts (this logic must match array_set_element). We assume * Check subscripts (this logic matches original array_set_element)
* the existing subscripts passed ArrayCheckBounds, so that dim[i] + lb[i]
* can be computed without overflow. But we must beware of other
* overflows in our calculations of new dim[] values.
*/ */
if (ndim == 1) if (ndim == 1)
{ {
if (indx[0] < lb[0]) if (indx[0] < lb[0])
{ {
/* addedbefore = lb[0] - indx[0]; */ addedbefore = lb[0] - indx[0];
/* dim[0] += addedbefore; */ dim[0] += addedbefore;
if (pg_sub_s32_overflow(lb[0], indx[0], &addedbefore) ||
pg_add_s32_overflow(dim[0], addedbefore, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
lb[0] = indx[0]; lb[0] = indx[0];
dimschanged = true; dimschanged = true;
if (addedbefore > 1) if (addedbefore > 1)
@ -2628,15 +2602,8 @@ array_set_element_expanded(Datum arraydatum,
} }
if (indx[0] >= (dim[0] + lb[0])) if (indx[0] >= (dim[0] + lb[0]))
{ {
/* addedafter = indx[0] - (dim[0] + lb[0]) + 1; */ addedafter = indx[0] - (dim[0] + lb[0]) + 1;
/* dim[0] += addedafter; */ dim[0] += addedafter;
if (pg_sub_s32_overflow(indx[0], dim[0] + lb[0], &addedafter) ||
pg_add_s32_overflow(addedafter, 1, &addedafter) ||
pg_add_s32_overflow(dim[0], addedafter, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
dimschanged = true; dimschanged = true;
if (addedafter > 1) if (addedafter > 1)
newhasnulls = true; /* will insert nulls */ newhasnulls = true; /* will insert nulls */
@ -2919,10 +2886,7 @@ array_set_slice(Datum arraydatum,
addedbefore = addedafter = 0; addedbefore = addedafter = 0;
/* /*
* Check subscripts. We assume the existing subscripts passed * Check subscripts
* ArrayCheckBounds, so that dim[i] + lb[i] can be computed without
* overflow. But we must beware of other overflows in our calculations of
* new dim[] values.
*/ */
if (ndim == 1) if (ndim == 1)
{ {
@ -2937,31 +2901,18 @@ array_set_slice(Datum arraydatum,
errmsg("upper bound cannot be less than lower bound"))); errmsg("upper bound cannot be less than lower bound")));
if (lowerIndx[0] < lb[0]) if (lowerIndx[0] < lb[0])
{ {
/* addedbefore = lb[0] - lowerIndx[0]; */ if (upperIndx[0] < lb[0] - 1)
/* dim[0] += addedbefore; */
if (pg_sub_s32_overflow(lb[0], lowerIndx[0], &addedbefore) ||
pg_add_s32_overflow(dim[0], addedbefore, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
lb[0] = lowerIndx[0];
if (addedbefore > 1)
newhasnulls = true; /* will insert nulls */ newhasnulls = true; /* will insert nulls */
addedbefore = lb[0] - lowerIndx[0];
dim[0] += addedbefore;
lb[0] = lowerIndx[0];
} }
if (upperIndx[0] >= (dim[0] + lb[0])) if (upperIndx[0] >= (dim[0] + lb[0]))
{ {
/* addedafter = upperIndx[0] - (dim[0] + lb[0]) + 1; */ if (lowerIndx[0] > (dim[0] + lb[0]))
/* dim[0] += addedafter; */
if (pg_sub_s32_overflow(upperIndx[0], dim[0] + lb[0], &addedafter) ||
pg_add_s32_overflow(addedafter, 1, &addedafter) ||
pg_add_s32_overflow(dim[0], addedafter, &dim[0]))
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("array size exceeds the maximum allowed (%d)",
(int) MaxArraySize)));
if (addedafter > 1)
newhasnulls = true; /* will insert nulls */ newhasnulls = true; /* will insert nulls */
addedafter = upperIndx[0] - (dim[0] + lb[0]) + 1;
dim[0] += addedafter;
} }
} }
else else
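
The checks restored in the three hunks above all encode the same argument: the existing dim[] and lb[] values already passed ArrayCheckBounds, so dim[0] + lb[0] itself cannot overflow, but the new values derived from a user-supplied subscript can, and therefore go through pg_sub_s32_overflow()/pg_add_s32_overflow() from common/int.h. Below is a minimal standalone sketch of that pattern, written against the GCC/Clang builtins those helpers wrap; the function and variable names (grow_to_subscript and friends) are illustrative, not the server code.

    #include <limits.h>
    #include <stdbool.h>
    #include <stdio.h>

    /*
     * Grow a 1-D array description (dim elements starting at lower bound lb)
     * so that it covers subscript idx.  As in array_set_element, dim + lb is
     * assumed not to overflow; every freshly computed value is checked, and
     * failure is reported instead of letting the int arithmetic wrap.
     */
    static bool
    grow_to_subscript(int *dim, int *lb, int idx)
    {
        if (idx < *lb)
        {
            int         addedbefore;

            if (__builtin_sub_overflow(*lb, idx, &addedbefore) ||
                __builtin_add_overflow(*dim, addedbefore, dim))
                return false;   /* "array size exceeds the maximum allowed" */
            *lb = idx;
        }
        else if (idx >= *dim + *lb)
        {
            int         addedafter;

            if (__builtin_sub_overflow(idx, *dim + *lb, &addedafter) ||
                __builtin_add_overflow(addedafter, 1, &addedafter) ||
                __builtin_add_overflow(*dim, addedafter, dim))
                return false;
        }
        return true;
    }

    int
    main(void)
    {
        int         dim = 10,
                    lb = 0;

        /* In-range growth succeeds and extends the array description. */
        printf("%s: dim=%d lb=%d\n",
               grow_to_subscript(&dim, &lb, 20) ? "ok" : "overflow", dim, lb);

        /* An absurd subscript is rejected rather than wrapping around. */
        printf("%s\n", grow_to_subscript(&dim, &lb, INT_MAX) ? "ok" : "overflow");
        return 0;
    }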

View File

@ -64,6 +64,10 @@ ArrayGetOffset0(int n, const int *tup, const int *scale)
* This must do overflow checking, since it is used to validate that a user * This must do overflow checking, since it is used to validate that a user
* dimensionality request doesn't overflow what we can handle. * dimensionality request doesn't overflow what we can handle.
* *
* We limit array sizes to at most about a quarter billion elements,
* so that it's not necessary to check for overflow in quite so many
* places --- for instance when palloc'ing Datum arrays.
*
* The multiplication overflow check only works on machines that have int64 * The multiplication overflow check only works on machines that have int64
* arithmetic, but that is nearly all platforms these days, and doing check * arithmetic, but that is nearly all platforms these days, and doing check
* divides for those that don't seems way too expensive. * divides for those that don't seems way too expensive.
@ -84,6 +88,8 @@ ArrayGetNItemsSafe(int ndim, const int *dims, struct Node *escontext)
int32 ret; int32 ret;
int i; int i;
#define MaxArraySize ((Size) (MaxAllocSize / sizeof(Datum)))
if (ndim <= 0) if (ndim <= 0)
return 0; return 0;
ret = 1; ret = 1;
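
The right-hand version of the comment above gives the rationale for MaxArraySize: capping the element count at MaxAllocSize / sizeof(Datum) lets later code palloc Datum arrays without further checks, and doing the per-dimension multiplication in int64 makes overflow detection cheap. A hedged, self-contained sketch of that multiply-and-clamp loop follows; the 0x3fffffff MaxAllocSize figure matches the PostgreSQL sources, everything else is illustrative rather than the real ArrayGetNItems.

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_ALLOC_SIZE  ((size_t) 0x3fffffff)                 /* MaxAllocSize: ~1 GB */
    #define MAX_ARRAY_SIZE  (MAX_ALLOC_SIZE / sizeof(uint64_t))   /* uint64_t stands in for Datum */

    /*
     * Multiply the per-dimension sizes in 64-bit arithmetic, rejecting any
     * result that could never be palloc'd as a Datum array.  Checking after
     * every multiplication keeps the running product far below INT64_MAX.
     */
    static int64_t
    count_array_items(int ndim, const int *dims)
    {
        int64_t     prod = 1;

        if (ndim <= 0)
            return 0;
        for (int i = 0; i < ndim; i++)
        {
            if (dims[i] < 0)
                return -1;              /* invalid dimension */
            prod *= dims[i];
            if (prod > (int64_t) MAX_ARRAY_SIZE)
                return -1;              /* "array size exceeds the maximum allowed" */
        }
        return prod;
    }

    int
    main(void)
    {
        int         ok[2] = {1000, 1000};
        int         too_big[2] = {1 << 16, 1 << 16};    /* 2^32 elements */

        printf("%lld\n", (long long) count_array_items(2, ok));        /* 1000000 */
        printf("%lld\n", (long long) count_array_items(2, too_big));   /* -1 */
        return 0;
    }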

View File

@ -47,7 +47,6 @@
#ifdef USE_LIBXML #ifdef USE_LIBXML
#include <libxml/chvalid.h> #include <libxml/chvalid.h>
#include <libxml/entities.h>
#include <libxml/parser.h> #include <libxml/parser.h>
#include <libxml/parserInternals.h> #include <libxml/parserInternals.h>
#include <libxml/tree.h> #include <libxml/tree.h>
@ -514,27 +513,6 @@ xmlcomment(PG_FUNCTION_ARGS)
} }
Datum
xmltext(PG_FUNCTION_ARGS)
{
#ifdef USE_LIBXML
text *arg = PG_GETARG_TEXT_PP(0);
text *result;
xmlChar *xmlbuf = NULL;
xmlbuf = xmlEncodeSpecialChars(NULL, xml_text2xmlChar(arg));
Assert(xmlbuf);
result = cstring_to_text_with_len((const char *) xmlbuf, xmlStrlen(xmlbuf));
xmlFree(xmlbuf);
PG_RETURN_XML_P(result);
#else
NO_XML_SUPPORT();
return 0;
#endif /* not USE_LIBXML */
}
/* /*
* TODO: xmlconcat needs to merge the notations and unparsed entities * TODO: xmlconcat needs to merge the notations and unparsed entities
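
The xmltext() body removed above is essentially a thin wrapper around libxml2's xmlEncodeSpecialChars(), which escapes the characters that are special in XML character data. Here is a small standalone sketch of that call, assuming libxml2 is installed (built with something like cc demo.c $(xml2-config --cflags --libs)); it is an illustration of the library call, not the backend function.

    #include <stdio.h>
    #include <libxml/entities.h>
    #include <libxml/xmlmemory.h>
    #include <libxml/xmlstring.h>

    int
    main(void)
    {
        const xmlChar *input = (const xmlChar *) "x < 1 & y > 2";

        /* Escape <, >, &, etc. so the string is a well-formed XML text node. */
        xmlChar    *escaped = xmlEncodeSpecialChars(NULL, input);

        if (escaped == NULL)
            return 1;
        printf("%s\n", (const char *) escaped);     /* x &lt; 1 &amp; y &gt; 2 */
        xmlFree(escaped);
        return 0;
    }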

View File

@ -604,97 +604,6 @@ RegisterSnapshotInvalidation(Oid dbId, Oid relId)
dbId, relId); dbId, relId);
} }
/*
* PrepareInvalidationState
* Initialize inval data for the current (sub)transaction.
*/
static void
PrepareInvalidationState(void)
{
TransInvalidationInfo *myInfo;
if (transInvalInfo != NULL &&
transInvalInfo->my_level == GetCurrentTransactionNestLevel())
return;
myInfo = (TransInvalidationInfo *)
MemoryContextAllocZero(TopTransactionContext,
sizeof(TransInvalidationInfo));
myInfo->parent = transInvalInfo;
myInfo->my_level = GetCurrentTransactionNestLevel();
/* Now, do we have a previous stack entry? */
if (transInvalInfo != NULL)
{
/* Yes; this one should be for a deeper nesting level. */
Assert(myInfo->my_level > transInvalInfo->my_level);
/*
* The parent (sub)transaction must not have any current (i.e.,
* not-yet-locally-processed) messages. If it did, we'd have a
* semantic problem: the new subtransaction presumably ought not be
* able to see those events yet, but since the CommandCounter is
* linear, that can't work once the subtransaction advances the
* counter. This is a convenient place to check for that, as well as
* being important to keep management of the message arrays simple.
*/
if (NumMessagesInGroup(&transInvalInfo->CurrentCmdInvalidMsgs) != 0)
elog(ERROR, "cannot start a subtransaction when there are unprocessed inval messages");
/*
* MemoryContextAllocZero set firstmsg = nextmsg = 0 in each group,
* which is fine for the first (sub)transaction, but otherwise we need
* to update them to follow whatever is already in the arrays.
*/
SetGroupToFollow(&myInfo->PriorCmdInvalidMsgs,
&transInvalInfo->CurrentCmdInvalidMsgs);
SetGroupToFollow(&myInfo->CurrentCmdInvalidMsgs,
&myInfo->PriorCmdInvalidMsgs);
}
else
{
/*
* Here, we need only clear any array pointers left over from a prior
* transaction.
*/
InvalMessageArrays[CatCacheMsgs].msgs = NULL;
InvalMessageArrays[CatCacheMsgs].maxmsgs = 0;
InvalMessageArrays[RelCacheMsgs].msgs = NULL;
InvalMessageArrays[RelCacheMsgs].maxmsgs = 0;
}
transInvalInfo = myInfo;
}
/* ----------------------------------------------------------------
* public functions
* ----------------------------------------------------------------
*/
void
InvalidateSystemCachesExtended(bool debug_discard)
{
int i;
InvalidateCatalogSnapshot();
ResetCatalogCaches();
RelationCacheInvalidate(debug_discard); /* gets smgr and relmap too */
for (i = 0; i < syscache_callback_count; i++)
{
struct SYSCACHECALLBACK *ccitem = syscache_callback_list + i;
ccitem->function(ccitem->arg, ccitem->id, 0);
}
for (i = 0; i < relcache_callback_count; i++)
{
struct RELCACHECALLBACK *ccitem = relcache_callback_list + i;
ccitem->function(ccitem->arg, InvalidOid);
}
}
/* /*
* LocalExecuteInvalidationMessage * LocalExecuteInvalidationMessage
* *
@ -795,6 +704,36 @@ InvalidateSystemCaches(void)
InvalidateSystemCachesExtended(false); InvalidateSystemCachesExtended(false);
} }
void
InvalidateSystemCachesExtended(bool debug_discard)
{
int i;
InvalidateCatalogSnapshot();
ResetCatalogCaches();
RelationCacheInvalidate(debug_discard); /* gets smgr and relmap too */
for (i = 0; i < syscache_callback_count; i++)
{
struct SYSCACHECALLBACK *ccitem = syscache_callback_list + i;
ccitem->function(ccitem->arg, ccitem->id, 0);
}
for (i = 0; i < relcache_callback_count; i++)
{
struct RELCACHECALLBACK *ccitem = relcache_callback_list + i;
ccitem->function(ccitem->arg, InvalidOid);
}
}
/* ----------------------------------------------------------------
* public functions
* ----------------------------------------------------------------
*/
/* /*
* AcceptInvalidationMessages * AcceptInvalidationMessages
* Read and process invalidation messages from the shared invalidation * Read and process invalidation messages from the shared invalidation
@ -848,6 +787,68 @@ AcceptInvalidationMessages(void)
#endif #endif
} }
/*
* PrepareInvalidationState
* Initialize inval data for the current (sub)transaction.
*/
static void
PrepareInvalidationState(void)
{
TransInvalidationInfo *myInfo;
if (transInvalInfo != NULL &&
transInvalInfo->my_level == GetCurrentTransactionNestLevel())
return;
myInfo = (TransInvalidationInfo *)
MemoryContextAllocZero(TopTransactionContext,
sizeof(TransInvalidationInfo));
myInfo->parent = transInvalInfo;
myInfo->my_level = GetCurrentTransactionNestLevel();
/* Now, do we have a previous stack entry? */
if (transInvalInfo != NULL)
{
/* Yes; this one should be for a deeper nesting level. */
Assert(myInfo->my_level > transInvalInfo->my_level);
/*
* The parent (sub)transaction must not have any current (i.e.,
* not-yet-locally-processed) messages. If it did, we'd have a
* semantic problem: the new subtransaction presumably ought not be
* able to see those events yet, but since the CommandCounter is
* linear, that can't work once the subtransaction advances the
* counter. This is a convenient place to check for that, as well as
* being important to keep management of the message arrays simple.
*/
if (NumMessagesInGroup(&transInvalInfo->CurrentCmdInvalidMsgs) != 0)
elog(ERROR, "cannot start a subtransaction when there are unprocessed inval messages");
/*
* MemoryContextAllocZero set firstmsg = nextmsg = 0 in each group,
* which is fine for the first (sub)transaction, but otherwise we need
* to update them to follow whatever is already in the arrays.
*/
SetGroupToFollow(&myInfo->PriorCmdInvalidMsgs,
&transInvalInfo->CurrentCmdInvalidMsgs);
SetGroupToFollow(&myInfo->CurrentCmdInvalidMsgs,
&myInfo->PriorCmdInvalidMsgs);
}
else
{
/*
* Here, we need only clear any array pointers left over from a prior
* transaction.
*/
InvalMessageArrays[CatCacheMsgs].msgs = NULL;
InvalMessageArrays[CatCacheMsgs].maxmsgs = 0;
InvalMessageArrays[RelCacheMsgs].msgs = NULL;
InvalMessageArrays[RelCacheMsgs].maxmsgs = 0;
}
transInvalInfo = myInfo;
}
/* /*
* PostPrepare_Inval * PostPrepare_Inval
* Clean up after successful PREPARE. * Clean up after successful PREPARE.
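
Both copies of PrepareInvalidationState() shown in this hunk (the function is only being moved within the file) describe the same mechanism: invalidation state lives on a stack with one entry per (sub)transaction nesting level, an entry is created lazily, and a parent that still has unprocessed messages is treated as an error. The toy sketch below mimics only that shape; the struct and function names are made up for illustration and are not the backend's.

    #include <assert.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* One entry per (sub)transaction nesting level, created lazily. */
    typedef struct LevelState
    {
        struct LevelState *parent;
        int         my_level;
        int         n_current_msgs;   /* stands in for CurrentCmdInvalidMsgs */
    } LevelState;

    static LevelState *top = NULL;
    static int  nest_level = 1;       /* stands in for GetCurrentTransactionNestLevel() */

    static void
    prepare_state(void)
    {
        LevelState *s;

        /* Already have state for the current nesting level: nothing to do. */
        if (top != NULL && top->my_level == nest_level)
            return;

        s = calloc(1, sizeof(LevelState));
        if (s == NULL)
            exit(1);
        s->parent = top;
        s->my_level = nest_level;

        if (top != NULL)
        {
            /* New entry must be deeper, and the parent must be "caught up". */
            assert(s->my_level > top->my_level);
            assert(top->n_current_msgs == 0);
        }
        top = s;
    }

    int
    main(void)
    {
        prepare_state();              /* top-level transaction */
        prepare_state();              /* same level: no-op */
        nest_level = 2;               /* enter a subtransaction */
        prepare_state();              /* pushes a second entry */
        printf("stack levels: %d over %d\n", top->my_level, top->parent->my_level);
        return 0;
    }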

View File

@ -842,14 +842,6 @@ InitializeSessionUserIdStandalone(void)
AuthenticatedUserId = BOOTSTRAP_SUPERUSERID; AuthenticatedUserId = BOOTSTRAP_SUPERUSERID;
SetSessionUserId(BOOTSTRAP_SUPERUSERID, true); SetSessionUserId(BOOTSTRAP_SUPERUSERID, true);
/*
* XXX This should set SetConfigOption("session_authorization"), too.
* Since we don't, C code will get NULL, and current_setting() will get an
* empty string.
*/
SetConfigOption("is_superuser", "on",
PGC_INTERNAL, PGC_S_DYNAMIC_DEFAULT);
} }
/* /*

View File

@ -21,5 +21,5 @@ OBJS = \
include $(top_srcdir)/src/backend/common.mk include $(top_srcdir)/src/backend/common.mk
clean distclean: clean distclean maintainer-clean:
$(MAKE) -C conversion_procs $@ $(MAKE) -C conversion_procs $@

View File

@ -66,6 +66,8 @@ all: $(MAPS)
distclean: clean distclean: clean
rm -f $(TEXTS) rm -f $(TEXTS)
maintainer-clean: distclean
rm -f $(MAPS) rm -f $(MAPS)

View File

@ -13,5 +13,5 @@ installdirs: installdirs-lib
uninstall: uninstall-lib uninstall: uninstall-lib
clean distclean: clean-lib clean distclean maintainer-clean: clean-lib
rm -f $(OBJS) rm -f $(OBJS)

View File

@ -40,5 +40,6 @@ endif
include $(top_srcdir)/src/backend/common.mk include $(top_srcdir)/src/backend/common.mk
# Note: guc-file.c is not deleted by 'make clean',
# since we want to ship it in distribution tarballs.
clean: clean:
rm -f guc-file.c

View File

@ -54,7 +54,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/initdb$(X)' rm -f '$(DESTDIR)$(bindir)/initdb$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f initdb$(X) $(OBJS) localtime.c rm -f initdb$(X) $(OBJS) localtime.c
rm -rf tmp_check rm -rf tmp_check

View File

@ -40,7 +40,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_amcheck$(X)' rm -f '$(DESTDIR)$(bindir)/pg_amcheck$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_amcheck$(X) $(OBJS) rm -f pg_amcheck$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -25,7 +25,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_archivecleanup$(X)' rm -f '$(DESTDIR)$(bindir)/pg_archivecleanup$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_archivecleanup$(X) $(OBJS) rm -f pg_archivecleanup$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -71,7 +71,7 @@ uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_receivewal$(X)' rm -f '$(DESTDIR)$(bindir)/pg_receivewal$(X)'
rm -f '$(DESTDIR)$(bindir)/pg_recvlogical$(X)' rm -f '$(DESTDIR)$(bindir)/pg_recvlogical$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_basebackup$(X) pg_receivewal$(X) pg_recvlogical$(X) \ rm -f pg_basebackup$(X) pg_receivewal$(X) pg_recvlogical$(X) \
$(BBOBJS) pg_receivewal.o pg_recvlogical.o \ $(BBOBJS) pg_receivewal.o pg_recvlogical.o \
$(OBJS) $(OBJS)

View File

@ -36,7 +36,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_checksums$(X)' rm -f '$(DESTDIR)$(bindir)/pg_checksums$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_checksums$(X) $(OBJS) rm -f pg_checksums$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -33,7 +33,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_config$(X)' rm -f '$(DESTDIR)$(bindir)/pg_config$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_config$(X) $(OBJS) rm -f pg_config$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -33,7 +33,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_controldata$(X)' rm -f '$(DESTDIR)$(bindir)/pg_controldata$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_controldata$(X) $(OBJS) rm -f pg_controldata$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -42,7 +42,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_ctl$(X)' rm -f '$(DESTDIR)$(bindir)/pg_ctl$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_ctl$(X) $(OBJS) rm -f pg_ctl$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -69,6 +69,6 @@ installcheck:
uninstall: uninstall:
rm -f $(addprefix '$(DESTDIR)$(bindir)'/, pg_dump$(X) pg_restore$(X) pg_dumpall$(X)) rm -f $(addprefix '$(DESTDIR)$(bindir)'/, pg_dump$(X) pg_restore$(X) pg_dumpall$(X))
clean distclean: clean distclean maintainer-clean:
rm -f pg_dump$(X) pg_restore$(X) pg_dumpall$(X) $(OBJS) pg_dump.o common.o pg_dump_sort.o pg_restore.o pg_dumpall.o rm -f pg_dump$(X) pg_restore$(X) pg_dumpall$(X) $(OBJS) pg_dump.o common.o pg_dump_sort.o pg_restore.o pg_dumpall.o
rm -rf tmp_check rm -rf tmp_check

View File

@ -35,7 +35,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_resetwal$(X)' rm -f '$(DESTDIR)$(bindir)/pg_resetwal$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_resetwal$(X) $(OBJS) rm -f pg_resetwal$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -14,7 +14,6 @@ program_options_handling_ok('pg_resetwal');
my $node = PostgreSQL::Test::Cluster->new('main'); my $node = PostgreSQL::Test::Cluster->new('main');
$node->init; $node->init;
$node->append_conf('postgresql.conf', 'track_commit_timestamp = on');
command_like([ 'pg_resetwal', '-n', $node->data_dir ], command_like([ 'pg_resetwal', '-n', $node->data_dir ],
qr/checkpoint/, 'pg_resetwal -n produces output'); qr/checkpoint/, 'pg_resetwal -n produces output');
@ -30,207 +29,4 @@ SKIP:
'check PGDATA permissions'); 'check PGDATA permissions');
} }
command_ok([ 'pg_resetwal', '-D', $node->data_dir ], 'pg_resetwal runs');
$node->start;
is($node->safe_psql("postgres", "SELECT 1;"),
1, 'server running and working after reset');
command_fails_like(
[ 'pg_resetwal', $node->data_dir ],
qr/lock file .* exists/,
'fails if server running');
$node->stop('immediate');
command_fails_like(
[ 'pg_resetwal', $node->data_dir ],
qr/database server was not shut down cleanly/,
'does not run after immediate shutdown');
command_ok(
[ 'pg_resetwal', '-f', $node->data_dir ],
'runs after immediate shutdown with force');
$node->start;
is($node->safe_psql("postgres", "SELECT 1;"),
1, 'server running and working after forced reset');
$node->stop;
# check various command-line handling
# Note: This test intends to check that a nonexistent data directory
# gives a reasonable error message. Because of the way the code is
# currently structured, you get an error about reading permissions,
# which is perhaps suboptimal, so feel free to update this test if
# this gets improved.
command_fails_like(
[ 'pg_resetwal', 'foo' ],
qr/error: could not read permissions of directory/,
'fails with nonexistent data directory');
command_fails_like(
[ 'pg_resetwal', 'foo', 'bar' ],
qr/too many command-line arguments/,
'fails with too many command-line arguments');
$ENV{PGDATA} = $node->data_dir; # not used
command_fails_like(
['pg_resetwal'],
qr/no data directory specified/,
'fails with too few command-line arguments');
# error cases
# -c
command_fails_like(
[ 'pg_resetwal', '-c', 'foo', $node->data_dir ],
qr/error: invalid argument for option -c/,
'fails with incorrect -c option');
command_fails_like(
[ 'pg_resetwal', '-c', '10,bar', $node->data_dir ],
qr/error: invalid argument for option -c/,
'fails with incorrect -c option part 2');
command_fails_like(
[ 'pg_resetwal', '-c', '1,10', $node->data_dir ],
qr/greater than/,
'fails with -c value 1 part 1');
command_fails_like(
[ 'pg_resetwal', '-c', '10,1', $node->data_dir ],
qr/greater than/,
'fails with -c value 1 part 2');
# -e
command_fails_like(
[ 'pg_resetwal', '-e', 'foo', $node->data_dir ],
qr/error: invalid argument for option -e/,
'fails with incorrect -e option');
command_fails_like(
[ 'pg_resetwal', '-e', '-1', $node->data_dir ],
qr/must not be -1/,
'fails with -e value -1');
# -l
command_fails_like(
[ 'pg_resetwal', '-l', 'foo', $node->data_dir ],
qr/error: invalid argument for option -l/,
'fails with incorrect -l option');
# -m
command_fails_like(
[ 'pg_resetwal', '-m', 'foo', $node->data_dir ],
qr/error: invalid argument for option -m/,
'fails with incorrect -m option');
command_fails_like(
[ 'pg_resetwal', '-m', '10,bar', $node->data_dir ],
qr/error: invalid argument for option -m/,
'fails with incorrect -m option part 2');
command_fails_like(
[ 'pg_resetwal', '-m', '0,10', $node->data_dir ],
qr/must not be 0/,
'fails with -m value 0 part 1');
command_fails_like(
[ 'pg_resetwal', '-m', '10,0', $node->data_dir ],
qr/must not be 0/,
'fails with -m value 0 part 2');
# -o
command_fails_like(
[ 'pg_resetwal', '-o', 'foo', $node->data_dir ],
qr/error: invalid argument for option -o/,
'fails with incorrect -o option');
command_fails_like(
[ 'pg_resetwal', '-o', '0', $node->data_dir ],
qr/must not be 0/,
'fails with -o value 0');
# -O
command_fails_like(
[ 'pg_resetwal', '-O', 'foo', $node->data_dir ],
qr/error: invalid argument for option -O/,
'fails with incorrect -O option');
command_fails_like(
[ 'pg_resetwal', '-O', '-1', $node->data_dir ],
qr/must not be -1/,
'fails with -O value -1');
# --wal-segsize
command_fails_like(
[ 'pg_resetwal', '--wal-segsize', 'foo', $node->data_dir ],
qr/error: invalid value/,
'fails with incorrect --wal-segsize option');
command_fails_like(
[ 'pg_resetwal', '--wal-segsize', '13', $node->data_dir ],
qr/must be a power/,
'fails with invalid --wal-segsize value');
# -u
command_fails_like(
[ 'pg_resetwal', '-u', 'foo', $node->data_dir ],
qr/error: invalid argument for option -u/,
'fails with incorrect -u option');
command_fails_like(
[ 'pg_resetwal', '-u', '1', $node->data_dir ],
qr/must be greater than/,
'fails with -u value too small');
# -x
command_fails_like(
[ 'pg_resetwal', '-x', 'foo', $node->data_dir ],
qr/error: invalid argument for option -x/,
'fails with incorrect -x option');
command_fails_like(
[ 'pg_resetwal', '-x', '1', $node->data_dir ],
qr/must be greater than/,
'fails with -x value too small');
# run with control override options
my $out = (run_command([ 'pg_resetwal', '-n', $node->data_dir ]))[0];
$out =~ /^Database block size: *(\d+)$/m or die;
my $blcksz = $1;
my @cmd = ('pg_resetwal', '-D', $node->data_dir);
# some not-so-critical hardcoded values
push @cmd, '-e', 1;
push @cmd, '-l', '00000001000000320000004B';
push @cmd, '-o', 100_000;
push @cmd, '--wal-segsize', 1;
# these use the guidance from the documentation
sub get_slru_files
{
opendir(my $dh, $node->data_dir . '/' . $_[0]) or die $!;
my @files = sort grep { /[0-9A-F]+/ } readdir $dh;
closedir $dh;
return @files;
}
my (@files, $mult);
@files = get_slru_files('pg_commit_ts');
# XXX: Should there be a multiplier, similar to the other options?
# -c argument is "old,new"
push @cmd,
'-c',
sprintf("%d,%d", hex($files[0]) == 0 ? 3 : hex($files[0]), hex($files[-1]));
@files = get_slru_files('pg_multixact/offsets');
$mult = 32 * $blcksz / 4;
# -m argument is "new,old"
push @cmd, '-m',
sprintf("%d,%d",
(hex($files[-1]) + 1) * $mult,
hex($files[0]) == 0 ? 1 : hex($files[0]) * $mult);
@files = get_slru_files('pg_multixact/members');
$mult = 32 * int($blcksz / 20) * 4;
push @cmd, '-O', (hex($files[-1]) + 1) * $mult;
@files = get_slru_files('pg_xact');
$mult = 32 * $blcksz * 4;
push @cmd,
'-u', (hex($files[0]) == 0 ? 3 : hex($files[0]) * $mult),
'-x', ((hex($files[-1]) + 1) * $mult);
command_ok([ @cmd, '-n' ], 'runs with control override options, dry run');
command_ok(\@cmd, 'runs with control override options');
command_like(
[ 'pg_resetwal', '-n', $node->data_dir ],
qr/^Latest checkpoint's NextOID: *100000$/m,
'spot check that control changes were applied');
$node->start;
ok(1, 'server started after reset');
done_testing(); done_testing();
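
The control-override section of the removed test derives its -m, -O, -u and -x arguments from the number of existing SLRU segment files times a per-segment multiplier, following the guidance in the pg_resetwal documentation. A quick arithmetic check of those multipliers, assuming the default 8 kB block size that the test reads back from pg_resetwal -n and 32 SLRU pages per segment file:

    #include <stdio.h>

    int
    main(void)
    {
        long        blcksz = 8192;        /* default "Database block size" */
        long        pages_per_seg = 32;   /* SLRU pages per segment file */

        /* pg_multixact/offsets: one 4-byte offset per multixact (-m). */
        long        mxid_per_seg = pages_per_seg * blcksz / 4;

        /* pg_multixact/members: four members per 20-byte group (-O). */
        long        members_per_seg = pages_per_seg * (blcksz / 20) * 4;

        /* pg_xact: four 2-bit transaction status flags per byte (-x, -u). */
        long        xids_per_seg = pages_per_seg * blcksz * 4;

        printf("-m multiplier: %ld\n", mxid_per_seg);       /* 65536 */
        printf("-O multiplier: %ld\n", members_per_seg);    /* 52352 */
        printf("-x/-u multiplier: %ld\n", xids_per_seg);    /* 1048576 */
        return 0;
    }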

View File

@ -55,12 +55,4 @@ command_checks_all(
], ],
'processes zero WAL segment size'); 'processes zero WAL segment size');
# now try to run it
command_fails_like(
[ 'pg_resetwal', $node->data_dir ],
qr/not proceeding because control file values were guessed/,
'does not run when control file values were guessed');
command_ok([ 'pg_resetwal', '-f', $node->data_dir ],
'runs with force when control file values were guessed');
done_testing(); done_testing();

View File

@ -49,7 +49,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_rewind$(X)' rm -f '$(DESTDIR)$(bindir)/pg_rewind$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_rewind$(X) $(OBJS) xlogreader.c rm -f pg_rewind$(X) $(OBJS) xlogreader.c
rm -rf tmp_check rm -rf tmp_check

View File

@ -31,6 +31,6 @@ installcheck:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_test_fsync$(X)' rm -f '$(DESTDIR)$(bindir)/pg_test_fsync$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_test_fsync$(X) $(OBJS) rm -f pg_test_fsync$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -31,6 +31,6 @@ installcheck:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_test_timing$(X)' rm -f '$(DESTDIR)$(bindir)/pg_test_timing$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_test_timing$(X) $(OBJS) rm -f pg_test_timing$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -49,7 +49,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_upgrade$(X)' rm -f '$(DESTDIR)$(bindir)/pg_upgrade$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_upgrade$(X) $(OBJS) rm -f pg_upgrade$(X) $(OBJS)
rm -rf delete_old_cluster.sh log/ tmp_check/ \ rm -rf delete_old_cluster.sh log/ tmp_check/ \
reindex_hash.sql reindex_hash.sql

View File

@ -38,7 +38,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_verifybackup$(X)' rm -f '$(DESTDIR)$(bindir)/pg_verifybackup$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_verifybackup$(X) $(OBJS) rm -f pg_verifybackup$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check

View File

@ -45,7 +45,7 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/pg_waldump$(X)' rm -f '$(DESTDIR)$(bindir)/pg_waldump$(X)'
clean distclean: clean distclean maintainer-clean:
rm -f pg_waldump$(X) $(OBJS) $(RMGRDESCSOURCES) xlogreader.c xlogstats.c rm -f pg_waldump$(X) $(OBJS) $(RMGRDESCSOURCES) xlogreader.c xlogstats.c
rm -rf tmp_check rm -rf tmp_check

View File

@ -38,6 +38,8 @@ exprparse.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built: # Force these dependencies to be known even without dependency info built:
exprparse.o exprscan.o: exprparse.h exprparse.o exprscan.o: exprparse.h
distprep: exprparse.c exprscan.c
install: all installdirs install: all installdirs
$(INSTALL_PROGRAM) pgbench$(X) '$(DESTDIR)$(bindir)/pgbench$(X)' $(INSTALL_PROGRAM) pgbench$(X) '$(DESTDIR)$(bindir)/pgbench$(X)'
@ -50,6 +52,8 @@ uninstall:
clean distclean: clean distclean:
rm -f pgbench$(X) $(OBJS) rm -f pgbench$(X) $(OBJS)
rm -rf tmp_check rm -rf tmp_check
maintainer-clean: distclean
rm -f exprparse.h exprparse.c exprscan.c rm -f exprparse.h exprparse.c exprscan.c
check: check:

View File

@ -62,6 +62,8 @@ psqlscanslash.c: FLEXFLAGS = -Cfe -p -p
psqlscanslash.c: FLEX_NO_BACKUP=yes psqlscanslash.c: FLEX_NO_BACKUP=yes
psqlscanslash.c: FLEX_FIX_WARNING=yes psqlscanslash.c: FLEX_FIX_WARNING=yes
distprep: sql_help.h sql_help.c psqlscanslash.c
install: all installdirs install: all installdirs
$(INSTALL_PROGRAM) psql$(X) '$(DESTDIR)$(bindir)/psql$(X)' $(INSTALL_PROGRAM) psql$(X) '$(DESTDIR)$(bindir)/psql$(X)'
$(INSTALL_DATA) $(srcdir)/psqlrc.sample '$(DESTDIR)$(datadir)/psqlrc.sample' $(INSTALL_DATA) $(srcdir)/psqlrc.sample '$(DESTDIR)$(datadir)/psqlrc.sample'
@ -75,6 +77,10 @@ uninstall:
clean distclean: clean distclean:
rm -f psql$(X) $(OBJS) lex.backup rm -f psql$(X) $(OBJS) lex.backup
rm -rf tmp_check rm -rf tmp_check
# files removed here are supposed to be in the distribution tarball,
# so do not clean them in the clean/distclean rules
maintainer-clean: distclean
rm -f sql_help.h sql_help.c psqlscanslash.c rm -f sql_help.h sql_help.c psqlscanslash.c
check: check:

View File

@ -48,7 +48,7 @@ installdirs:
uninstall: uninstall:
rm -f $(addprefix '$(DESTDIR)$(bindir)'/, $(addsuffix $(X), $(PROGRAMS))) rm -f $(addprefix '$(DESTDIR)$(bindir)'/, $(addsuffix $(X), $(PROGRAMS)))
clean distclean: clean distclean maintainer-clean:
rm -f $(addsuffix $(X), $(PROGRAMS)) $(addsuffix .o, $(PROGRAMS)) rm -f $(addsuffix $(X), $(PROGRAMS)) $(addsuffix .o, $(PROGRAMS))
rm -f common.o $(WIN32RES) rm -f common.o $(WIN32RES)
rm -rf tmp_check rm -rf tmp_check

View File

@ -123,6 +123,8 @@ GEN_KEYWORDLIST_DEPS = $(TOOLSDIR)/gen_keywordlist.pl $(TOOLSDIR)/PerfectHash.pm
all: libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a all: libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a
distprep: kwlist_d.h
# libpgcommon is needed by some contrib # libpgcommon is needed by some contrib
install: all installdirs install: all installdirs
$(INSTALL_STLIB) libpgcommon.a '$(DESTDIR)$(libdir)/libpgcommon.a' $(INSTALL_STLIB) libpgcommon.a '$(DESTDIR)$(libdir)/libpgcommon.a'
@ -195,7 +197,10 @@ RYU_OBJS = $(RYU_FILES) $(RYU_FILES:%.o=%_shlib.o) $(RYU_FILES:%.o=%_srv.o)
$(RYU_OBJS): CFLAGS += $(PERMIT_DECLARATION_AFTER_STATEMENT) $(RYU_OBJS): CFLAGS += $(PERMIT_DECLARATION_AFTER_STATEMENT)
# kwlist_d.h is in the distribution tarball, so it is not cleaned here.
clean distclean: clean distclean:
rm -f libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a rm -f libpgcommon.a libpgcommon_shlib.a libpgcommon_srv.a
rm -f $(OBJS_FRONTEND) $(OBJS_SHLIB) $(OBJS_SRV) rm -f $(OBJS_FRONTEND) $(OBJS_SHLIB) $(OBJS_SRV)
maintainer-clean: distclean
rm -f kwlist_d.h rm -f kwlist_d.h

View File

@ -83,3 +83,5 @@ clean:
distclean: clean distclean: clean
rm -f UnicodeData.txt EastAsianWidth.txt CompositionExclusions.txt NormalizationTest.txt norm_test_table.h unicode_norm_table.h rm -f UnicodeData.txt EastAsianWidth.txt CompositionExclusions.txt NormalizationTest.txt norm_test_table.h unicode_norm_table.h
maintainer-clean: distclean

View File

@ -48,6 +48,8 @@ psqlscan.c: FLEXFLAGS = -Cfe -p -p
psqlscan.c: FLEX_NO_BACKUP=yes psqlscan.c: FLEX_NO_BACKUP=yes
psqlscan.c: FLEX_FIX_WARNING=yes psqlscan.c: FLEX_FIX_WARNING=yes
distprep: psqlscan.c
# libpgfeutils could be useful to contrib, so install it # libpgfeutils could be useful to contrib, so install it
install: all installdirs install: all installdirs
$(INSTALL_STLIB) libpgfeutils.a '$(DESTDIR)$(libdir)/libpgfeutils.a' $(INSTALL_STLIB) libpgfeutils.a '$(DESTDIR)$(libdir)/libpgfeutils.a'
@ -60,4 +62,8 @@ uninstall:
clean distclean: clean distclean:
rm -f libpgfeutils.a $(OBJS) lex.backup rm -f libpgfeutils.a $(OBJS) lex.backup
# psqlscan.c is supposed to be in the distribution tarball,
# so do not clean it in the clean/distclean rules
maintainer-clean: distclean
rm -f psqlscan.c rm -f psqlscan.c

View File

@ -78,5 +78,5 @@ clean:
rm -f catalog/pg_*_d.h catalog/header-stamp rm -f catalog/pg_*_d.h catalog/header-stamp
rm -f nodes/nodetags.h nodes/header-stamp rm -f nodes/nodetags.h nodes/header-stamp
distclean: clean distclean maintainer-clean: clean
rm -f pg_config.h pg_config_ext.h pg_config_os.h stamp-h stamp-ext-h rm -f pg_config.h pg_config_ext.h pg_config_os.h stamp-h stamp-ext-h

View File

@ -8793,9 +8793,6 @@
{ oid => '2922', descr => 'serialize an XML value to a character string', { oid => '2922', descr => 'serialize an XML value to a character string',
proname => 'text', prorettype => 'text', proargtypes => 'xml', proname => 'text', prorettype => 'text', proargtypes => 'xml',
prosrc => 'xmltotext' }, prosrc => 'xmltotext' },
{ oid => '3813', descr => 'generate XML text node',
proname => 'xmltext', proisstrict => 't', prorettype => 'xml',
proargtypes => 'text', prosrc => 'xmltext' },
{ oid => '2923', descr => 'map table contents to XML', { oid => '2923', descr => 'map table contents to XML',
proname => 'table_to_xml', procost => '100', provolatile => 's', proname => 'table_to_xml', procost => '100', provolatile => 's',

View File

@ -74,13 +74,6 @@ struct ExprContext;
*/ */
#define MAXDIM 6 #define MAXDIM 6
/*
* Maximum number of elements in an array. We limit this to at most about a
* quarter billion elements, so that it's not necessary to check for overflow
* in quite so many places --- for instance when palloc'ing Datum arrays.
*/
#define MaxArraySize ((Size) (MaxAllocSize / sizeof(Datum)))
/* /*
* Arrays are varlena objects, so must meet the varlena convention that * Arrays are varlena objects, so must meet the varlena convention that
* the first int32 of the object contains the total object size in bytes. * the first int32 of the object contains the total object size in bytes.

View File

@ -23,7 +23,7 @@ install-pgtypeslib-recurse install-ecpglib-recurse install-compatlib-recurse ins
install-compatlib-recurse: install-ecpglib-recurse install-compatlib-recurse: install-ecpglib-recurse
install-ecpglib-recurse: install-pgtypeslib-recurse install-ecpglib-recurse: install-pgtypeslib-recurse
clean distclean: clean distclean maintainer-clean:
$(MAKE) -C test clean $(MAKE) -C test clean
checktcp: | temp-install checktcp: | temp-install

View File

@ -55,3 +55,5 @@ uninstall: uninstall-lib
clean distclean: clean-lib clean distclean: clean-lib
rm -f $(OBJS) rm -f $(OBJS)
maintainer-clean: distclean

View File

@ -66,3 +66,5 @@ uninstall: uninstall-lib
clean distclean: clean-lib clean distclean: clean-lib
rm -f $(OBJS) rm -f $(OBJS)
maintainer-clean: distclean

View File

@ -31,5 +31,5 @@ uninstall:
rm -f $(addprefix '$(DESTDIR)$(informix_esql_dir)'/, $(informix_headers)) rm -f $(addprefix '$(DESTDIR)$(informix_esql_dir)'/, $(informix_headers))
rm -f '$(DESTDIR)$(includedir)'/$(notdir $(ecpg_config_h)) rm -f '$(DESTDIR)$(includedir)'/$(notdir $(ecpg_config_h))
distclean: distclean maintainer-clean:
rm -f ecpg_config.h stamp-h rm -f ecpg_config.h stamp-h

View File

@ -50,3 +50,5 @@ uninstall: uninstall-lib
clean distclean: clean-lib clean distclean: clean-lib
rm -f $(OBJS) rm -f $(OBJS)
maintainer-clean: distclean

View File

@ -81,6 +81,8 @@ ecpg_keywords.o: ecpg_kwlist_d.h
c_keywords.o: c_kwlist_d.h c_keywords.o: c_kwlist_d.h
keywords.o: $(top_srcdir)/src/include/parser/kwlist.h keywords.o: $(top_srcdir)/src/include/parser/kwlist.h
distprep: preproc.y preproc.c preproc.h pgc.c c_kwlist_d.h ecpg_kwlist_d.h
install: all installdirs install: all installdirs
$(INSTALL_PROGRAM) ecpg$(X) '$(DESTDIR)$(bindir)' $(INSTALL_PROGRAM) ecpg$(X) '$(DESTDIR)$(bindir)'
@ -90,7 +92,11 @@ installdirs:
uninstall: uninstall:
rm -f '$(DESTDIR)$(bindir)/ecpg$(X)' rm -f '$(DESTDIR)$(bindir)/ecpg$(X)'
# preproc.y, preproc.c, preproc.h, pgc.c, c_kwlist_d.h, and ecpg_kwlist_d.h
# are in the distribution tarball, so they are not cleaned here.
clean distclean: clean distclean:
rm -f *.o ecpg$(X) rm -f *.o ecpg$(X)
rm -f typename.c rm -f typename.c
maintainer-clean: distclean
rm -f preproc.y preproc.c preproc.h pgc.c c_kwlist_d.h ecpg_kwlist_d.h rm -f preproc.y preproc.c preproc.h pgc.c c_kwlist_d.h ecpg_kwlist_d.h

View File

@ -21,7 +21,7 @@ else
abs_builddir := $(shell sh -c "pwd -W") abs_builddir := $(shell sh -c "pwd -W")
endif endif
all install installdirs uninstall: all install installdirs uninstall distprep:
$(MAKE) -C connect $@ $(MAKE) -C connect $@
$(MAKE) -C sql $@ $(MAKE) -C sql $@
$(MAKE) -C pgtypeslib $@ $(MAKE) -C pgtypeslib $@
@ -30,7 +30,7 @@ all install installdirs uninstall:
$(MAKE) -C compat_oracle $@ $(MAKE) -C compat_oracle $@
$(MAKE) -C thread $@ $(MAKE) -C thread $@
clean distclean: clean distclean maintainer-clean:
$(MAKE) -C connect $@ $(MAKE) -C connect $@
$(MAKE) -C sql $@ $(MAKE) -C sql $@
$(MAKE) -C pgtypeslib $@ $(MAKE) -C pgtypeslib $@

View File

@ -165,3 +165,6 @@ clean distclean: clean-lib
rm -f $(OBJS) pthread.h libpq-refs-stamp rm -f $(OBJS) pthread.h libpq-refs-stamp
# Might be left over from a Win32 client-only build # Might be left over from a Win32 client-only build
rm -f pg_config_paths.h rm -f pg_config_paths.h
maintainer-clean: distclean
$(MAKE) -C test $@

View File

@ -20,5 +20,5 @@ all: $(PROGS)
$(PROGS): $(WIN32RES) $(PROGS): $(WIN32RES)
clean distclean: clean distclean maintainer-clean:
rm -f $(PROGS) *.o rm -f $(PROGS) *.o

View File

@ -391,7 +391,7 @@ ifdef MODULE_big
clean: clean-lib clean: clean-lib
endif endif
distclean: clean distclean maintainer-clean: clean
ifdef REGRESS ifdef REGRESS

View File

@ -171,7 +171,7 @@ all: all-po
install: install-po install: install-po
installdirs: installdirs-po installdirs: installdirs-po
uninstall: uninstall-po uninstall: uninstall-po
clean distclean: clean-po clean distclean maintainer-clean: clean-po
.PHONY: all-po install-po installdirs-po uninstall-po clean-po \ .PHONY: all-po install-po installdirs-po uninstall-po clean-po \
init-po update-po init-po update-po

View File

@ -127,7 +127,7 @@ installcheck: submake
submake: submake:
$(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X) $(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X)
clean distclean: clean-lib clean distclean maintainer-clean: clean-lib
rm -f SPI.c Util.c $(OBJS) perlchunks.h plperl_opmask.h rm -f SPI.c Util.c $(OBJS) perlchunks.h plperl_opmask.h
rm -rf $(pg_regress_clean_files) rm -rf $(pg_regress_clean_files)
ifeq ($(PORTNAME), win32) ifeq ($(PORTNAME), win32)

View File

@ -104,7 +104,14 @@ submake:
$(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X) $(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X)
distprep: pl_gram.h pl_gram.c plerrcodes.h pl_reserved_kwlist_d.h pl_unreserved_kwlist_d.h
# pl_gram.c, pl_gram.h, plerrcodes.h, pl_reserved_kwlist_d.h, and
# pl_unreserved_kwlist_d.h are in the distribution tarball, so they
# are not cleaned here.
clean distclean: clean-lib clean distclean: clean-lib
rm -f $(OBJS) rm -f $(OBJS)
rm -rf $(pg_regress_clean_files) rm -rf $(pg_regress_clean_files)
maintainer-clean: distclean
rm -f pl_gram.c pl_gram.h plerrcodes.h pl_reserved_kwlist_d.h pl_unreserved_kwlist_d.h rm -f pl_gram.c pl_gram.h plerrcodes.h pl_reserved_kwlist_d.h pl_unreserved_kwlist_d.h

View File

@ -142,7 +142,6 @@ clean distclean: clean-lib
ifeq ($(PORTNAME), win32) ifeq ($(PORTNAME), win32)
rm -f python${pytverstr}.def rm -f python${pytverstr}.def
endif endif
rm -f spiexceptions.h
# Force this dependency to be known even without dependency info built: # Force this dependency to be known even without dependency info built:
@ -150,3 +149,8 @@ plpy_plpymodule.o: spiexceptions.h
spiexceptions.h: $(top_srcdir)/src/backend/utils/errcodes.txt generate-spiexceptions.pl spiexceptions.h: $(top_srcdir)/src/backend/utils/errcodes.txt generate-spiexceptions.pl
$(PERL) $(srcdir)/generate-spiexceptions.pl $< > $@ $(PERL) $(srcdir)/generate-spiexceptions.pl $< > $@
distprep: spiexceptions.h
maintainer-clean: distclean
rm -f spiexceptions.h

View File

@ -63,6 +63,8 @@ pltcl.o: pltclerrcodes.h
pltclerrcodes.h: $(top_srcdir)/src/backend/utils/errcodes.txt generate-pltclerrcodes.pl pltclerrcodes.h: $(top_srcdir)/src/backend/utils/errcodes.txt generate-pltclerrcodes.pl
$(PERL) $(srcdir)/generate-pltclerrcodes.pl $< > $@ $(PERL) $(srcdir)/generate-pltclerrcodes.pl $< > $@
distprep: pltclerrcodes.h
install: all install-lib install-data install: all install-lib install-data
installdirs: installdirs-lib installdirs: installdirs-lib
@ -89,10 +91,13 @@ installcheck: submake
submake: submake:
$(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X) $(MAKE) -C $(top_builddir)/src/test/regress pg_regress$(X)
# pltclerrcodes.h is in the distribution tarball, so don't clean it here.
clean distclean: clean-lib clean distclean: clean-lib
rm -f $(OBJS) rm -f $(OBJS)
rm -rf $(pg_regress_clean_files) rm -rf $(pg_regress_clean_files)
ifeq ($(PORTNAME), win32) ifeq ($(PORTNAME), win32)
rm -f $(tclwithver).def rm -f $(tclwithver).def
endif endif
maintainer-clean: distclean
rm -f pltclerrcodes.h rm -f pltclerrcodes.h

View File

@ -154,6 +154,6 @@ pg_config_paths.h: $(top_builddir)/src/Makefile.global
echo "#define HTMLDIR \"$(htmldir)\"" >>$@ echo "#define HTMLDIR \"$(htmldir)\"" >>$@
echo "#define MANDIR \"$(mandir)\"" >>$@ echo "#define MANDIR \"$(mandir)\"" >>$@
clean distclean: clean distclean maintainer-clean:
rm -f libpgport.a libpgport_shlib.a libpgport_srv.a rm -f libpgport.a libpgport_shlib.a libpgport_srv.a
rm -f $(OBJS) $(OBJS_SHLIB) $(OBJS_SRV) pg_config_paths.h rm -f $(OBJS) $(OBJS_SHLIB) $(OBJS_SRV) pg_config_paths.h

View File

@ -19,5 +19,5 @@ check:
installcheck: installcheck:
$(prove_installcheck) $(prove_installcheck)
clean distclean: clean distclean maintainer-clean:
rm -rf tmp_check rm -rf tmp_check

View File

@ -18,5 +18,5 @@ PROGS = testlibpq testlibpq2 testlibpq3 testlibpq4 testlo testlo64
all: $(PROGS) all: $(PROGS)
clean distclean: clean distclean maintainer-clean:
rm -f $(PROGS) *.o rm -f $(PROGS) *.o

View File

@ -21,5 +21,5 @@ check:
installcheck: installcheck:
$(prove_installcheck) $(prove_installcheck)
clean distclean: clean distclean maintainer-clean:
rm -rf tmp_check rm -rf tmp_check

View File

@ -43,6 +43,8 @@ pg_isolation_regress$(X): isolation_main.o pg_regress.o $(WIN32RES)
isolationtester$(X): $(OBJS) | submake-libpq submake-libpgport isolationtester$(X): $(OBJS) | submake-libpq submake-libpgport
$(CC) $(CFLAGS) $^ $(libpq_pgport) $(LDFLAGS) $(LDFLAGS_EX) $(LIBS) -o $@ $(CC) $(CFLAGS) $^ $(libpq_pgport) $(LDFLAGS) $(LDFLAGS_EX) $(LIBS) -o $@
distprep: specparse.c specscanner.c
# See notes in src/backend/parser/Makefile about the following two rules # See notes in src/backend/parser/Makefile about the following two rules
specparse.h: specparse.c specparse.h: specparse.c
touch $@ touch $@
@ -52,10 +54,14 @@ specparse.c: BISONFLAGS += -d
# Force these dependencies to be known even without dependency info built: # Force these dependencies to be known even without dependency info built:
specparse.o specscanner.o: specparse.h specparse.o specscanner.o: specparse.h
# specparse.c and specscanner.c are in the distribution tarball,
# so do not clean them here
clean distclean: clean distclean:
rm -f isolationtester$(X) pg_isolation_regress$(X) $(OBJS) isolation_main.o rm -f isolationtester$(X) pg_isolation_regress$(X) $(OBJS) isolation_main.o
rm -f pg_regress.o rm -f pg_regress.o
rm -rf $(pg_regress_clean_files) rm -rf $(pg_regress_clean_files)
maintainer-clean: distclean
rm -f specparse.h specparse.c specscanner.c rm -f specparse.h specparse.c specscanner.c
installcheck: all installcheck: all

View File

@ -24,5 +24,5 @@ check:
installcheck: installcheck:
$(prove_installcheck) $(prove_installcheck)
clean distclean: clean distclean maintainer-clean:
rm -rf tmp_check rm -rf tmp_check

View File

@ -21,5 +21,5 @@ check:
installcheck: installcheck:
$(prove_installcheck) $(prove_installcheck)
clean distclean: clean distclean maintainer-clean:
rm -rf tmp_check rm -rf tmp_check

View File

@ -10,7 +10,7 @@ DIRS = de_DE.ISO8859-1 gr_GR.ISO8859-7 koi8-r koi8-to-win1251
all: $(PROGS) all: $(PROGS)
clean distclean: clean distclean maintainer-clean:
rm -f $(PROGS) *.o rm -f $(PROGS) *.o
rm -rf tmp_check rm -rf tmp_check
for d in $(DIRS); do \ for d in $(DIRS); do \

View File

@ -25,5 +25,5 @@ check:
installcheck: installcheck:
$(prove_installcheck) $(prove_installcheck)
clean distclean: clean distclean maintainer-clean:
rm -rf tmp_check rm -rf tmp_check

Some files were not shown because too many files have changed in this diff