Prevent a backend crash when processing CREATE TABLE commands with more than 65K columns, or when the created table has more than 65K columns due to adding inherited columns from parent relations. Fix a similar crash when processing SELECT queries with more than 65K target list entries. In all three cases we would eventually detect the error and elog, but the check was being made too late.
Neil Conway 2004-11-16 23:34:26 +00:00
parent 8a1821ab5b
commit e1bf6527f6
2 changed files with 41 additions and 2 deletions
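
The underlying hazard: PostgreSQL carries column numbers in AttrNumber, a 16-bit signed integer, so a column or target-list count past the int16 range silently wraps before the old, too-late check could fire. A minimal standalone sketch of that wraparound, for illustration only (the typedef mirrors PostgreSQL's; the wrap result assumes a typical two's-complement platform):

#include <stdio.h>
#include <stdint.h>

/* PostgreSQL defines AttrNumber as a 16-bit signed integer (int16). */
typedef int16_t AttrNumber;

int
main(void)
{
    int         ncolumns = 70000;   /* e.g. a CREATE TABLE with 70K columns */
    AttrNumber  attno = (AttrNumber) ncolumns;

    /*
     * The narrowing conversion silently wraps (70000 -> 4464 on typical
     * two's-complement platforms), so any code that indexes attributes
     * by attno misbehaves unless the count is rejected up front.
     */
    printf("ncolumns = %d, attno = %d\n", ncolumns, attno);
    return 0;
}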

src/backend/commands/tablecmds.c

@@ -8,7 +8,7 @@
*
*
* IDENTIFICATION
* $PostgreSQL: pgsql/src/backend/commands/tablecmds.c,v 1.139 2004/11/05 19:15:57 tgl Exp $
* $PostgreSQL: pgsql/src/backend/commands/tablecmds.c,v 1.140 2004/11/16 23:34:22 neilc Exp $
*
*-------------------------------------------------------------------------
*/
@@ -680,6 +680,23 @@ MergeAttributes(List *schema, List *supers, bool istemp,
* defaults */
int child_attno;
/*
* Check for and reject tables with too many columns. We perform
* this check relatively early for two reasons: (a) we don't run
* the risk of overflowing an AttrNumber in subsequent code, and
* (b) an O(n^2) algorithm is okay if we're processing <= 1600
* columns, but could take minutes to execute if the user attempts
* to create a table with hundreds of thousands of columns.
*
* Note that we also need to check that we do not exceed this
* figure after including columns from inherited relations.
*/
if (list_length(schema) > MaxHeapAttributeNumber)
ereport(ERROR,
(errcode(ERRCODE_TOO_MANY_COLUMNS),
errmsg("tables can have at most %d columns",
MaxHeapAttributeNumber)));
/*
* Check for duplicate names in the explicit list of attributes.
*
@@ -979,6 +996,16 @@ MergeAttributes(List *schema, List *supers, bool istemp,
}
schema = inhSchema;
/*
* Check that we haven't exceeded the legal # of columns after
* merging in inherited columns.
*/
if (list_length(schema) > MaxHeapAttributeNumber)
ereport(ERROR,
(errcode(ERRCODE_TOO_MANY_COLUMNS),
errmsg("tables can have at most %d columns",
MaxHeapAttributeNumber)));
}
/*

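Taken together, the two new checks in MergeAttributes bracket the expensive work: the first rejects an oversized explicit column list before the O(n^2) duplicate-name scan runs, and the second re-verifies the total after inherited columns have been merged in. A simplified, self-contained sketch of that ordering (the ColumnList type and function names are invented for illustration; the real code walks a List of ColumnDef nodes and reports errors via ereport):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAX_COLUMNS 1600        /* stands in for MaxHeapAttributeNumber */

/* hypothetical flat column list; PostgreSQL uses a List of ColumnDefs */
typedef struct
{
    const char **names;
    int         count;
} ColumnList;

static void
check_column_count(const ColumnList *cols)
{
    if (cols->count > MAX_COLUMNS)
    {
        fprintf(stderr, "tables can have at most %d columns\n", MAX_COLUMNS);
        exit(EXIT_FAILURE);
    }
}

static void
check_duplicate_names(const ColumnList *cols)
{
    /* O(n^2) scan: tolerable only because count was capped just above */
    for (int i = 0; i < cols->count; i++)
        for (int j = i + 1; j < cols->count; j++)
            if (strcmp(cols->names[i], cols->names[j]) == 0)
            {
                fprintf(stderr, "column \"%s\" specified more than once\n",
                        cols->names[i]);
                exit(EXIT_FAILURE);
            }
}

int
main(void)
{
    const char *names[] = {"id", "name", "value"};
    ColumnList  cols = {names, 3};

    check_column_count(&cols);      /* check 1: explicit column list */
    check_duplicate_names(&cols);   /* safe: n is known to be small */

    cols.count += 5000;             /* pretend inheritance merged in columns */
    check_column_count(&cols);      /* check 2: after merging parents */
    return 0;
}
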
src/backend/parser/analyze.c

@@ -6,7 +6,7 @@
* Portions Copyright (c) 1996-2004, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* $PostgreSQL: pgsql/src/backend/parser/analyze.c,v 1.312 2004/09/27 04:12:02 neilc Exp $
* $PostgreSQL: pgsql/src/backend/parser/analyze.c,v 1.313 2004/11/16 23:34:26 neilc Exp $
*
*-------------------------------------------------------------------------
*/
@@ -396,6 +396,18 @@ transformStmt(ParseState *pstate, Node *parseTree,
result->querySource = QSRC_ORIGINAL;
result->canSetTag = true;
/*
* Check that we did not produce too many resnos; at the very
* least we cannot allow more than 2^16, since that would exceed
* the range of an AttrNumber. It seems safest to use
* MaxTupleAttributeNumber.
*/
if (pstate->p_next_resno - 1 > MaxTupleAttributeNumber)
ereport(ERROR,
(errcode(ERRCODE_PROGRAM_LIMIT_EXCEEDED),
errmsg("target lists can have at most %d entries",
MaxTupleAttributeNumber)));
return result;
}
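
On the parser side, resnos are assigned starting at 1, so p_next_resno - 1 is exactly the number of target-list entries produced. A toy model of that arithmetic (MAX_TUPLE_ATTRS stands in for MaxTupleAttributeNumber, assumed here to be 1664):

#include <stdio.h>

#define MAX_TUPLE_ATTRS 1664    /* stands in for MaxTupleAttributeNumber */

int
main(void)
{
    int         next_resno = 1;     /* resnos are 1-based, like p_next_resno */
    int         ntargets = 70000;   /* e.g. a SELECT with 70K output columns */

    /* each target-list entry the parser emits consumes one resno */
    for (int i = 0; i < ntargets; i++)
        next_resno++;

    /* next_resno - 1 is therefore the count of entries produced */
    if (next_resno - 1 > MAX_TUPLE_ATTRS)
    {
        fprintf(stderr, "target lists can have at most %d entries\n",
                MAX_TUPLE_ATTRS);
        return 1;
    }
    return 0;
}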