Diffstat (limited to 'src/backend/optimizer')
-rw-r--r--  src/backend/optimizer/path/indxpath.c  | 121
-rw-r--r--  src/backend/optimizer/util/pathnode.c  | 100
2 files changed, 63 insertions(+), 158 deletions(-)
diff --git a/src/backend/optimizer/path/indxpath.c b/src/backend/optimizer/path/indxpath.c
index 2a50272da6b..bcb1bc6097d 100644
--- a/src/backend/optimizer/path/indxpath.c
+++ b/src/backend/optimizer/path/indxpath.c
@@ -122,7 +122,6 @@ static Cost bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel,
List *paths);
static PathClauseUsage *classify_index_clause_usage(Path *path,
List **clauselist);
-static Relids get_bitmap_tree_required_outer(Path *bitmapqual);
static void find_indexpath_quals(Path *bitmapqual, List **quals, List **preds);
static int find_list_position(Node *node, List **nodelist);
static bool check_index_only(RelOptInfo *rel, IndexOptInfo *index);
@@ -357,23 +356,16 @@ create_index_paths(PlannerInfo *root, RelOptInfo *rel)
*/
if (bitjoinpaths != NIL)
{
- List *path_outer;
List *all_path_outers;
ListCell *lc;
- /*
- * path_outer holds the parameterization of each path in bitjoinpaths
- * (to save recalculating that several times), while all_path_outers
- * holds all distinct parameterization sets.
- */
- path_outer = all_path_outers = NIL;
+ /* Identify each distinct parameterization seen in bitjoinpaths */
+ all_path_outers = NIL;
foreach(lc, bitjoinpaths)
{
Path *path = (Path *) lfirst(lc);
- Relids required_outer;
+ Relids required_outer = PATH_REQ_OUTER(path);
- required_outer = get_bitmap_tree_required_outer(path);
- path_outer = lappend(path_outer, required_outer);
if (!bms_equal_any(required_outer, all_path_outers))
all_path_outers = lappend(all_path_outers, required_outer);
}
@@ -388,16 +380,14 @@ create_index_paths(PlannerInfo *root, RelOptInfo *rel)
double loop_count;
BitmapHeapPath *bpath;
ListCell *lcp;
- ListCell *lco;
/* Identify all the bitmap join paths needing no more than that */
this_path_set = NIL;
- forboth(lcp, bitjoinpaths, lco, path_outer)
+ foreach(lcp, bitjoinpaths)
{
Path *path = (Path *) lfirst(lcp);
- Relids p_outers = (Relids) lfirst(lco);
- if (bms_is_subset(p_outers, max_outers))
+ if (bms_is_subset(PATH_REQ_OUTER(path), max_outers))
this_path_set = lappend(this_path_set, path);
}
@@ -411,7 +401,7 @@ create_index_paths(PlannerInfo *root, RelOptInfo *rel)
bitmapqual = choose_bitmap_and(root, rel, this_path_set);
/* And push that path into the mix */
- required_outer = get_bitmap_tree_required_outer(bitmapqual);
+ required_outer = PATH_REQ_OUTER(bitmapqual);
loop_count = get_loop_count(root, rel->relid, required_outer);
bpath = create_bitmap_heap_path(root, rel, bitmapqual,
required_outer, loop_count, 0);
@@ -1601,25 +1591,19 @@ path_usage_comparator(const void *a, const void *b)
/*
* Estimate the cost of actually executing a bitmap scan with a single
- * index path (no BitmapAnd, at least not at this level; but it could be
- * a BitmapOr).
+ * index path (which could be a BitmapAnd or BitmapOr node).
*/
static Cost
bitmap_scan_cost_est(PlannerInfo *root, RelOptInfo *rel, Path *ipath)
{
BitmapHeapPath bpath;
- Relids required_outer;
-
- /* Identify required outer rels, in case it's a parameterized scan */
- required_outer = get_bitmap_tree_required_outer(ipath);
/* Set up a dummy BitmapHeapPath */
bpath.path.type = T_BitmapHeapPath;
bpath.path.pathtype = T_BitmapHeapScan;
bpath.path.parent = rel;
bpath.path.pathtarget = rel->reltarget;
- bpath.path.param_info = get_baserel_parampathinfo(root, rel,
- required_outer);
+ bpath.path.param_info = ipath->param_info;
bpath.path.pathkeys = NIL;
bpath.bitmapqual = ipath;
@@ -1628,10 +1612,13 @@ bitmap_scan_cost_est(PlannerInfo *root, RelOptInfo *rel, Path *ipath)
* Parallel bitmap heap path will be considered at later stage.
*/
bpath.path.parallel_workers = 0;
+
+ /* Now we can do cost_bitmap_heap_scan */
cost_bitmap_heap_scan(&bpath.path, root, rel,
bpath.path.param_info,
ipath,
- get_loop_count(root, rel->relid, required_outer));
+ get_loop_count(root, rel->relid,
+ PATH_REQ_OUTER(ipath)));
return bpath.path.total_cost;
}
@@ -1643,46 +1630,15 @@ bitmap_scan_cost_est(PlannerInfo *root, RelOptInfo *rel, Path *ipath)
static Cost
bitmap_and_cost_est(PlannerInfo *root, RelOptInfo *rel, List *paths)
{
- BitmapAndPath apath;
- BitmapHeapPath bpath;
- Relids required_outer;
-
- /* Set up a dummy BitmapAndPath */
- apath.path.type = T_BitmapAndPath;
- apath.path.pathtype = T_BitmapAnd;
- apath.path.parent = rel;
- apath.path.pathtarget = rel->reltarget;
- apath.path.param_info = NULL; /* not used in bitmap trees */
- apath.path.pathkeys = NIL;
- apath.bitmapquals = paths;
- cost_bitmap_and_node(&apath, root);
-
- /* Identify required outer rels, in case it's a parameterized scan */
- required_outer = get_bitmap_tree_required_outer((Path *) &apath);
-
- /* Set up a dummy BitmapHeapPath */
- bpath.path.type = T_BitmapHeapPath;
- bpath.path.pathtype = T_BitmapHeapScan;
- bpath.path.parent = rel;
- bpath.path.pathtarget = rel->reltarget;
- bpath.path.param_info = get_baserel_parampathinfo(root, rel,
- required_outer);
- bpath.path.pathkeys = NIL;
- bpath.bitmapqual = (Path *) &apath;
+ BitmapAndPath *apath;
/*
- * Check the cost of temporary path without considering parallelism.
- * Parallel bitmap heap path will be considered at later stage.
+ * Might as well build a real BitmapAndPath here, as the work is slightly
+ * too complicated to be worth repeating just to save one palloc.
*/
- bpath.path.parallel_workers = 0;
-
- /* Now we can do cost_bitmap_heap_scan */
- cost_bitmap_heap_scan(&bpath.path, root, rel,
- bpath.path.param_info,
- (Path *) &apath,
- get_loop_count(root, rel->relid, required_outer));
+ apath = create_bitmap_and_path(root, rel, paths);
- return bpath.path.total_cost;
+ return bitmap_scan_cost_est(root, rel, (Path *) apath);
}
@@ -1754,49 +1710,6 @@ classify_index_clause_usage(Path *path, List **clauselist)
/*
- * get_bitmap_tree_required_outer
- * Find the required outer rels for a bitmap tree (index/and/or)
- *
- * We don't associate any particular parameterization with a BitmapAnd or
- * BitmapOr node; however, the IndexPaths have parameterization info, in
- * their capacity as standalone access paths. The parameterization required
- * for the bitmap heap scan node is the union of rels referenced in the
- * child IndexPaths.
- */
-static Relids
-get_bitmap_tree_required_outer(Path *bitmapqual)
-{
- Relids result = NULL;
- ListCell *lc;
-
- if (IsA(bitmapqual, IndexPath))
- {
- return bms_copy(PATH_REQ_OUTER(bitmapqual));
- }
- else if (IsA(bitmapqual, BitmapAndPath))
- {
- foreach(lc, ((BitmapAndPath *) bitmapqual)->bitmapquals)
- {
- result = bms_join(result,
- get_bitmap_tree_required_outer((Path *) lfirst(lc)));
- }
- }
- else if (IsA(bitmapqual, BitmapOrPath))
- {
- foreach(lc, ((BitmapOrPath *) bitmapqual)->bitmapquals)
- {
- result = bms_join(result,
- get_bitmap_tree_required_outer((Path *) lfirst(lc)));
- }
- }
- else
- elog(ERROR, "unrecognized node type: %d", nodeTag(bitmapqual));
-
- return result;
-}
-
-
-/*
* find_indexpath_quals
*
* Given the Path structure for a plain or bitmap indexscan, extract lists
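For reference, the indxpath.c hunks above can lean on PATH_REQ_OUTER() for whole bitmap trees only because the pathnode.c changes below start filling in param_info for BitmapAndPath and BitmapOrPath nodes. The macro itself just reads that field; a rough sketch of its definition, paraphrased from the optimizer headers under src/include/nodes/, looks like this:

	/* Sketch of PATH_REQ_OUTER: an unparameterized path has no param_info */
	#define PATH_REQ_OUTER(path)  \
		((path)->param_info ? (path)->param_info->ppi_req_outer : (Relids) NULL)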
diff --git a/src/backend/optimizer/util/pathnode.c b/src/backend/optimizer/util/pathnode.c
index e845a4b1ae1..5110a6b8060 100644
--- a/src/backend/optimizer/util/pathnode.c
+++ b/src/backend/optimizer/util/pathnode.c
@@ -1081,11 +1081,27 @@ create_bitmap_and_path(PlannerInfo *root,
List *bitmapquals)
{
BitmapAndPath *pathnode = makeNode(BitmapAndPath);
+ Relids required_outer = NULL;
+ ListCell *lc;
pathnode->path.pathtype = T_BitmapAnd;
pathnode->path.parent = rel;
pathnode->path.pathtarget = rel->reltarget;
- pathnode->path.param_info = NULL; /* not used in bitmap trees */
+
+ /*
+ * Identify the required outer rels as the union of what the child paths
+ * depend on. (Alternatively, we could insist that the caller pass this
+ * in, but it's more convenient and reliable to compute it here.)
+ */
+ foreach(lc, bitmapquals)
+ {
+ Path *bitmapqual = (Path *) lfirst(lc);
+
+ required_outer = bms_add_members(required_outer,
+ PATH_REQ_OUTER(bitmapqual));
+ }
+ pathnode->path.param_info = get_baserel_parampathinfo(root, rel,
+ required_outer);
/*
* Currently, a BitmapHeapPath, BitmapAndPath, or BitmapOrPath will be
@@ -1117,11 +1133,27 @@ create_bitmap_or_path(PlannerInfo *root,
List *bitmapquals)
{
BitmapOrPath *pathnode = makeNode(BitmapOrPath);
+ Relids required_outer = NULL;
+ ListCell *lc;
pathnode->path.pathtype = T_BitmapOr;
pathnode->path.parent = rel;
pathnode->path.pathtarget = rel->reltarget;
- pathnode->path.param_info = NULL; /* not used in bitmap trees */
+
+ /*
+ * Identify the required outer rels as the union of what the child paths
+ * depend on. (Alternatively, we could insist that the caller pass this
+ * in, but it's more convenient and reliable to compute it here.)
+ */
+ foreach(lc, bitmapquals)
+ {
+ Path *bitmapqual = (Path *) lfirst(lc);
+
+ required_outer = bms_add_members(required_outer,
+ PATH_REQ_OUTER(bitmapqual));
+ }
+ pathnode->path.param_info = get_baserel_parampathinfo(root, rel,
+ required_outer);
/*
* Currently, a BitmapHeapPath, BitmapAndPath, or BitmapOrPath will be
@@ -3885,7 +3917,18 @@ do { \
!bms_overlap(PATH_REQ_OUTER(path), child_rel->top_parent_relids))
return path;
- /* Reparameterize a copy of given path. */
+ /*
+ * If possible, reparameterize the given path, making a copy.
+ *
+ * This function is currently only applied to the inner side of a nestloop
+ * join that is being partitioned by the partitionwise-join code. Hence,
+ * we need only support path types that plausibly arise in that context.
+ * (In particular, supporting sorted path types would be a waste of code
+ * and cycles: even if we translated them here, they'd just lose in
+ * subsequent cost comparisons.) If we do see an unsupported path type,
+ * that just means we won't be able to generate a partitionwise-join plan
+ * using that path type.
+ */
switch (nodeTag(path))
{
case T_Path:
@@ -3932,16 +3975,6 @@ do { \
}
break;
- case T_TidPath:
- {
- TidPath *tpath;
-
- FLAT_COPY_PATH(tpath, path, TidPath);
- ADJUST_CHILD_ATTRS(tpath->tidquals);
- new_path = (Path *) tpath;
- }
- break;
-
case T_ForeignPath:
{
ForeignPath *fpath;
@@ -4032,37 +4065,6 @@ do { \
}
break;
- case T_MergeAppendPath:
- {
- MergeAppendPath *mapath;
-
- FLAT_COPY_PATH(mapath, path, MergeAppendPath);
- REPARAMETERIZE_CHILD_PATH_LIST(mapath->subpaths);
- new_path = (Path *) mapath;
- }
- break;
-
- case T_MaterialPath:
- {
- MaterialPath *mpath;
-
- FLAT_COPY_PATH(mpath, path, MaterialPath);
- REPARAMETERIZE_CHILD_PATH(mpath->subpath);
- new_path = (Path *) mpath;
- }
- break;
-
- case T_UniquePath:
- {
- UniquePath *upath;
-
- FLAT_COPY_PATH(upath, path, UniquePath);
- REPARAMETERIZE_CHILD_PATH(upath->subpath);
- ADJUST_CHILD_ATTRS(upath->uniq_exprs);
- new_path = (Path *) upath;
- }
- break;
-
case T_GatherPath:
{
GatherPath *gpath;
@@ -4073,16 +4075,6 @@ do { \
}
break;
- case T_GatherMergePath:
- {
- GatherMergePath *gmpath;
-
- FLAT_COPY_PATH(gmpath, path, GatherMergePath);
- REPARAMETERIZE_CHILD_PATH(gmpath->subpath);
- new_path = (Path *) gmpath;
- }
- break;
-
default:
/* We don't know how to reparameterize this path. */
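One point worth noting about the new constructor code in pathnode.c: get_baserel_parampathinfo() returns NULL when the required_outer set it is handed is empty, so an unparameterized bitmap tree still ends up with param_info == NULL, and PATH_REQ_OUTER() on its root still yields NULL, just as before. With the parameterization of a bitmap tree now recorded at its root, callers can read it there directly instead of walking the tree; a minimal sketch of that caller-side pattern, with names mirroring the indxpath.c hunks above (illustrative only, not part of the patch):

	Relids			required_outer = PATH_REQ_OUTER(bitmapqual);
	double			loop_count = get_loop_count(root, rel->relid, required_outer);
	BitmapHeapPath *bpath = create_bitmap_heap_path(root, rel, bitmapqual,
													required_outer, loop_count, 0);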