From 0ac98eedfef945403822d23e3efc9f7248602895 Mon Sep 17 00:00:00 2001 From: Amit Langote Date: Mon, 22 Sep 2025 16:19:26 +0900 Subject: [PATCH v2 8/8] WIP: Add ExecQualBatch() and EEOPs for batched quals Introduce ExecInitQualBatch()/ExecQualBatch() to evaluate scan quals over a TupleBatch. The batched qual interpreter produces a boolean mask aligned with the batch, marking which rows satisfy the qual. The scan node later uses this mask to copy only passing rows into its output slots. If batching is not possible, fall back to the existing per-tuple engine. Add EEOP_QUAL_BATCH_INITMASK and EEOP_QUAL_BATCH_TERM, and wire them after EEOP_SCAN_FETCHSOME_BATCH and EEOP_BUILD_SCAN_BATCH_VECTOR. Batching is limited to quals that are a top-level AND of simple clauses: either NullTest(var) or strict binary OpExpr with var/const or var/var arguments. A walker validates the tree, collects the referenced attnos, and builds a BatchVector; terms are compiled from the leaves and evaluated to update the mask. ExprState gains batch_private to hold BatchQualRuntime (mask, words) which are used by the parent node to populate output slots in TupleBatch. 
--- src/backend/executor/execExpr.c | 324 ++++++++++++++++++++++++++ src/backend/executor/execExprInterp.c | 202 ++++++++++++++++ src/backend/executor/nodeSeqscan.c | 2 + src/backend/jit/llvm/llvmjit_expr.c | 11 + src/backend/jit/llvm/llvmjit_types.c | 2 + src/include/executor/execExpr.h | 60 +++++ src/include/executor/execScan.h | 35 +-- src/include/executor/executor.h | 3 + src/include/nodes/execnodes.h | 4 + 9 files changed, 630 insertions(+), 13 deletions(-) diff --git a/src/backend/executor/execExpr.c b/src/backend/executor/execExpr.c index 27a5780f557..63df560d5f1 100644 --- a/src/backend/executor/execExpr.c +++ b/src/backend/executor/execExpr.c @@ -111,6 +111,19 @@ static BatchVector *BatchVectorCreate(Bitmapset *attnos, AttrNumber last_var); static bool ExprListAllSimpleVars(const List *args, Bitmapset **allattnos); static BatchVectorSlice *BatchVectorSliceFromExprArgs(const List *args, const BatchVector *bv); +static int16 BatchVectorFindAttColno(const BatchVector *bv, AttrNumber attno); +static int16 BatchVectorOffsetForVarExpr(Expr *expr, const BatchVector *bv); + +/* private context for the walker */ +typedef struct QualBatchContext +{ + List *leaves; /* List of accepted leaves */ + Bitmapset *attnos; /* Vars referenced by accepted leaves */ + bool ok; /* stays true if batchable */ + AttrNumber last_scan; /* last needed attribute in scan slot */ +} QualBatchContext; + +static bool qual_batchable_walker(Node *node, void *context); /* * ExecInitExpr: prepare an expression tree for execution @@ -5221,6 +5234,209 @@ ExprListAllSimpleVars(const List *args, Bitmapset **allattnos) return true; } +/* helper: extract Var (allowing RelabelType->Var); returns NULL if not */ +static Var * +strip_to_var(Node *n) +{ + if (n == NULL) + return NULL; + if (IsA(n, RelabelType)) + n = (Node *) ((RelabelType *) n)->arg; + if (!IsA(n, Var)) + return NULL; + if (((Var *) n)->varattno < 0) + return NULL; + return (Var *) n; +} + +/* main walker; return true to abort traversal 
early, false to continue */ +static bool +qual_batchable_walker(Node *node, void *context) +{ + QualBatchContext *cxt = (QualBatchContext *) context; + + if (node == NULL || !cxt->ok) + return false; + + switch (nodeTag(node)) + { + case T_List: + return expression_tree_walker(node, qual_batchable_walker, cxt); + + case T_BoolExpr: + { + BoolExpr *b = (BoolExpr *) node; + + /* Only AND trees are allowed */ + if (b->boolop != AND_EXPR) + { + cxt->ok = false; + return true; /* abort */ + } + /* Recurse normally over children */ + return expression_tree_walker(node, qual_batchable_walker, cxt); + } + + case T_NullTest: + { + NullTest *nt = (NullTest *) node; + Var *v = strip_to_var((Node *) nt->arg); + + if (v == NULL) + { + cxt->ok = false; + return true; + } + + cxt->attnos = bms_add_member(cxt->attnos, v->varattno); + if (v->varattno > cxt->last_scan) + cxt->last_scan = v->varattno; + cxt->leaves = lappend(cxt->leaves, node); + + /* Do NOT recurse into leaf */ + return false; + } + + case T_OpExpr: + { + OpExpr *op = (OpExpr *) node; + List *args = op->args; + Node *l, *r; + Var *lv, + *rv = NULL; + + /* binary only */ + if (list_length(args) != 2) + { + cxt->ok = false; + return true; + } + /* strict operator only (NULL -> false semantics) */ + if (!func_strict(op->opfuncid)) + { + cxt->ok = false; + return true; + } + + l = linitial(args); + r = lsecond(args); + lv = strip_to_var(l); + if (lv == NULL) + { + cxt->ok = false; + return true; + } + cxt->attnos = bms_add_member(cxt->attnos, lv->varattno); + if (lv->varattno > cxt->last_scan) + cxt->last_scan = lv->varattno; + + if (IsA(r, Const)) + { + /* ok; no attno to add */ + } + else + { + rv = strip_to_var(r); + if (rv == NULL) + { + cxt->ok = false; + return true; + } + cxt->attnos = bms_add_member(cxt->attnos, rv->varattno); + if (rv->varattno > cxt->last_scan) + cxt->last_scan = rv->varattno; + } + + cxt->leaves = lappend(cxt->leaves, node); + + /* Leaf handled; do NOT recurse into args */ + return false; + } 
+ + /* Whitelist ends here; anything else in the tree rejects */ + default: + cxt->ok = false; + break; + } + + return true; +} + +/* build a BatchQualTerm from a validated leaf */ +static BatchQualTerm * +build_term_from_leaf(Node *n, BatchVector *bv) +{ + BatchQualTerm *term; + BatchQualTermKind kind; + bool strict; + int16 l_off; + int16 r_off; + Datum r_const = (Datum) 0; + bool r_isnull = false; + FmgrInfo *finfo = NULL; + Oid collation; + + if (IsA(n, NullTest)) + { + NullTest *nt = (NullTest *) n; + + kind = nt->nulltesttype == IS_NULL ? BQTK_IS_NULL : BQTK_IS_NOT_NULL; + l_off = BatchVectorOffsetForVarExpr(nt->arg, bv); + r_off = -1; + strict = false; + collation = InvalidOid; + + if (l_off < 0) + return NULL; + } + else if (IsA(n, OpExpr)) + { + OpExpr *op = (OpExpr *) n; + Expr *l = linitial(op->args); + Expr *r = lsecond(op->args); + + l_off = BatchVectorOffsetForVarExpr(l, bv); + if (l_off < 0) + return NULL; + + r_off = BatchVectorOffsetForVarExpr(r, bv); + if (IsA(r, Const)) + { + Const *c = (Const *) r; + + kind = BQTK_VAR_CONST; + r_const = c->constvalue; + r_isnull = c->constisnull; + r_off = -1; + } + else + { + if (r_off < 0) + return NULL; + kind = BQTK_VAR_VAR; + } + + strict = func_strict(op->opfuncid); + collation = exprInputCollation((Node *) op); + finfo = palloc(sizeof(FmgrInfo)); + fmgr_info(op->opfuncid, finfo); + } + else + return NULL; + + term = palloc(sizeof(BatchQualTerm)); + term->kind = kind; + term->strict = strict; + term->l_off = l_off; + term->r_off = r_off; + term->r_const = r_const; + term->r_isnull = r_isnull; + term->finfo = finfo; + term->collation = collation; + + return term; +} + /* ---------- BatchVector stuff ------------- */ static BatchVector * @@ -5298,3 +5514,111 @@ BatchVectorSliceFromExprArgs(const List *args, const BatchVector *bv) return bvs; } + +/* + * BatchVectorOffsetForVarExpr + * Map a Var (or RelabelType->Var) to its BatchVector column index. + * Returns -1 if the Var's attno is not present.
+ */ +static int16 +BatchVectorOffsetForVarExpr(Expr *expr, const BatchVector *bv) +{ + AttrNumber attno; + + if (!expr_is_simple_var(expr, &attno)) + return -1; + + return (int16) BatchVectorFindAttColno(bv, attno); +} + +/* + * ExecInitQualBatch + * Build a batched-qual EEOP program (AND-only). + * Caller should also keep scalar ps->qual for runtime fallback. + */ +ExprState * +ExecInitQualBatch(PlanState *ps) +{ + Node *qual = (Node *) ps->plan->qual; + QualBatchContext cxt = {NIL, NULL, true, 0}; + BatchQualRuntime *rt; + ExprState *state; + BatchVector *bv; + uint64 *mask; + int mask_words; + ListCell *lc; + ExprEvalStep scratch = {0}; + + if (qual == NULL) + return NULL; + + /* validate + collect leaves/attnos with walker */ + (void) qual_batchable_walker(qual, &cxt); + if (!cxt.ok || cxt.leaves == NIL || bms_is_empty(cxt.attnos)) + return NULL; + + bv = BatchVectorCreate(cxt.attnos, cxt.last_scan); + + mask_words = (bv->maxrows + 63) >> 6; + mask = (uint64 *) palloc0(sizeof(uint64) * mask_words); + + /* Runtime carrier (lifetime == exprstate) */ + rt = palloc0(sizeof(BatchQualRuntime)); + rt->mask = mask; + rt->mask_words = mask_words; + + /* dedicated ExprState for batched program */ + + state = makeNode(ExprState); + state->expr = (Expr *) qual; + state->parent = ps; + state->ext_params = NULL; + + /* mark expression as to be used with ExecQual() */ + state->flags = EEO_FLAG_IS_QUAL; + + /* Only valid as batch qual if this is set. 
*/ + state->batch_private = (void *) rt; + + scratch.opcode = EEOP_SCAN_FETCHSOME_BATCH; + scratch.d.fetch_batch.last_var = cxt.last_scan; + ExprEvalPushStep(state, &scratch); + + scratch.opcode = EEOP_BUILD_SCAN_BATCH_VECTOR; + scratch.d.batch_vector.bv = bv; + ExprEvalPushStep(state, &scratch); + + scratch.opcode = EEOP_QUAL_BATCH_INITMASK; + scratch.d.qualbatch_init.bv = bv; + scratch.d.qualbatch_init.mask = mask; + scratch.d.qualbatch_init.mask_words = mask_words; + ExprEvalPushStep(state, &scratch); + + /* TERM per leaf */ + foreach(lc, cxt.leaves) + { + BatchQualTerm *term = build_term_from_leaf((Node *) lfirst(lc), bv); + + if (term == NULL) + return NULL; + + scratch.opcode = EEOP_QUAL_BATCH_TERM; + scratch.d.qualbatch_term.bv = bv; + scratch.d.qualbatch_term.mask = mask; + scratch.d.qualbatch_term.mask_words = mask_words; + scratch.d.qualbatch_term.term = term; /* by value */ + ExprEvalPushStep(state, &scratch); + } + + /* + * At the end, we don't need to do anything more. The last qual expr must + * have yielded TRUE, and since its result is stored in the desired output + * location, we're done. 
+ */ + scratch.opcode = EEOP_DONE_NO_RETURN; + ExprEvalPushStep(state, &scratch); + + ExecReadyExpr(state); + + return state; +} diff --git a/src/backend/executor/execExprInterp.c b/src/backend/executor/execExprInterp.c index 41ad9b4838d..5c2baa0e19d 100644 --- a/src/backend/executor/execExprInterp.c +++ b/src/backend/executor/execExprInterp.c @@ -608,6 +608,8 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull) &&CASE_EEOP_BUILD_SCAN_BATCH_VECTOR, &&CASE_EEOP_AGG_PLAIN_TRANS_BATCH_ROWLOOP, &&CASE_EEOP_AGG_PLAIN_TRANS_BATCH_DIRECT, + &&CASE_EEOP_QUAL_BATCH_INITMASK, + &&CASE_EEOP_QUAL_BATCH_TERM, &&CASE_EEOP_LAST }; @@ -2350,7 +2352,19 @@ ExecInterpExpr(ExprState *state, ExprContext *econtext, bool *isnull) { /* too complex for an inline implementation */ ExecAggPlainTransBatch(state, op, econtext); + EEO_NEXT(); + } + + EEO_CASE(EEOP_QUAL_BATCH_INITMASK) + { + ExecQualBatchInitMask(state, op, econtext); + EEO_NEXT(); + } + + EEO_CASE(EEOP_QUAL_BATCH_TERM) + { + ExecQualBatchTerm(state, op, econtext); EEO_NEXT(); } @@ -6185,3 +6199,191 @@ ExecAggPlainTransBatch(ExprState *state, ExprEvalStep *op, ExprContext *econtext elog(ERROR, "invalid ExprEvalOp in ExecAggPlainTransBatch()"); } } + +/* set mask bits [0..nvalid_bits) to 1; clear padding in the last word */ +static inline void +mask_init_all_ones(uint64 *a, int nwords, int nvalid_bits) +{ + for (int i = 0; i < nwords; i++) + a[i] = ~UINT64CONST(0); + + if ((nvalid_bits & 63) != 0) + { + int rem = nvalid_bits & 63; + + a[nwords - 1] &= (~UINT64CONST(0)) >> (64 - rem); + } +} + +static inline void +mask_clear_bit(uint64 *a, int i) +{ + a[i >> 6] &= ~(UINT64CONST(1) << (i & 63)); +} + +void +ExecQualBatchInitMask(ExprState *state, ExprEvalStep *op, ExprContext *econtext) +{ + BatchVector *bv = op->d.qualbatch_init.bv; + uint64 *mask = op->d.qualbatch_init.mask; + int nwords = op->d.qualbatch_init.mask_words; + int n = bv->nrows; + + /* initialize to all-pass for current batch size */ + 
mask_init_all_ones(mask, nwords, n); +} + +void +ExecQualBatchTerm(ExprState *state, ExprEvalStep *op, ExprContext *econtext) +{ + BatchVector *bv = op->d.qualbatch_term.bv; + uint64 *mask = op->d.qualbatch_term.mask; + BatchQualTerm *t = op->d.qualbatch_term.term; + int n = bv->nrows; + + switch (t->kind) + { + case BQTK_IS_NULL: + { + /* keep bit set only if value IS NULL; clear otherwise */ + for (int i = 0; i < n; i++) + { + if (!bv->nulls[t->l_off][i]) + mask_clear_bit(mask, i); + } + break; + } + + case BQTK_IS_NOT_NULL: + { + /* keep bit set only if value IS NOT NULL; clear if NULL */ + for (int i = 0; i < n; i++) + { + if (bv->nulls[t->l_off][i]) + mask_clear_bit(mask, i); + } + break; + } + + case BQTK_VAR_CONST: + { + const bool r_isnull = t->r_isnull; + const Datum r_const = t->r_const; + const bool strict = t->strict; + const Oid coll = t->collation; + FmgrInfo *finfo = t->finfo; + int loff = t->l_off; + + for (int i = 0; i < n; i++) + { + bool ln = bv->nulls[loff][i]; + bool pass; + + /* WHERE treats NULL as false; strict ops short-circuit */ + if (strict && (ln || r_isnull)) + pass = false; + else + { + Datum lv = bv->cols[loff][i]; + + /* fast-paths could go here based on t->fastclass */ + + pass = DatumGetBool(FunctionCall2Coll(finfo, coll, lv, r_const)); + } + + if (!pass) + mask_clear_bit(mask, i); + } + break; + } + + case BQTK_VAR_VAR: + { + const bool strict = t->strict; + const Oid coll = t->collation; + FmgrInfo *finfo = t->finfo; + int loff = t->l_off; + int roff = t->r_off; + + for (int i = 0; i < n; i++) + { + bool ln = bv->nulls[loff][i]; + bool rn = bv->nulls[roff][i]; + bool pass; + + if (strict && (ln || rn)) + pass = false; + else + { + Datum lv = bv->cols[loff][i]; + Datum rv = bv->cols[roff][i]; + + /* fast-paths could go here based on t->fastclass */ + + pass = DatumGetBool(FunctionCall2Coll(finfo, coll, lv, rv)); + } + + if (!pass) + mask_clear_bit(mask, i); + } + break; + } + + default: + /* should not happen; leave mask 
unchanged */ + break; + } +} + +static inline bool +mask_is_empty(const uint64 *mask, int nwords) +{ + for (int i = 0; i < nwords; i++) + { + if (mask[i] != 0) + return false; + } + return true; +} + +/* + * ExecQualBatch + * Evaluate a compiled qual (EEOP_QUAL) for a batch of rows. + * + * Returns the number of true rows (optional convenience for callers). + */ +int +ExecQualBatch(ExprState *state, ExprContext *econtext, TupleBatch *b) +{ + int i; + uint64 *mask; + int kept = 0; + BatchQualRuntime *rt = ExecGetBatchQualRuntime(state); + + /* verify that expression was compiled using ExecInitQualBatch */ + Assert(state->flags & EEO_FLAG_IS_QUAL); + Assert(rt && rt->mask && rt->mask_words); + + /* run the batched EEOP program once */ + econtext->scan_batch = b; + ExecEvalExprNoReturn(state, econtext); + + mask = rt->mask; + if (mask_is_empty(mask, rt->mask_words)) + return 0; + + /* Add survivors into outslots */ + TupleBatchRewind(b); + i = 0; + while (TupleBatchHasMore(b)) + { + TupleTableSlot *slot = TupleBatchGetNextSlot(b); + + /* mask bit set => row survives */ + if (mask[i >> 6] & (UINT64CONST(1) << (i & 63))) + TupleBatchStoreInOut(b, kept++, slot); + i++; + } + + return kept; +} diff --git a/src/backend/executor/nodeSeqscan.c b/src/backend/executor/nodeSeqscan.c index a4cf1e51af0..e5ca619731f 100644 --- a/src/backend/executor/nodeSeqscan.c +++ b/src/backend/executor/nodeSeqscan.c @@ -401,6 +401,8 @@ SeqScanInitBatching(SeqScanState *scanstate, int eflags) scanstate->ss.ps.ExecProcNode = ExecSeqScanBatchSlotWithQualProject; } } + + scanstate->ss.ps.qual_batch = ExecInitQualBatch((PlanState *) scanstate); } /* ---------------------------------------------------------------- diff --git a/src/backend/jit/llvm/llvmjit_expr.c b/src/backend/jit/llvm/llvmjit_expr.c index 45346124bd7..b97d5faebde 100644 --- a/src/backend/jit/llvm/llvmjit_expr.c +++ b/src/backend/jit/llvm/llvmjit_expr.c @@ -3033,6 +3033,17 @@ llvm_compile_expr(ExprState *state) LLVMBuildBr(b,
opblocks[opno + 1]); break; + case EEOP_QUAL_BATCH_INITMASK: + build_EvalXFunc(b, mod, "ExecQualBatchInitMask", + v_state, op, v_econtext); + LLVMBuildBr(b, opblocks[opno + 1]); + break; + case EEOP_QUAL_BATCH_TERM: + build_EvalXFunc(b, mod, "ExecQualBatchTerm", + v_state, op, v_econtext); + LLVMBuildBr(b, opblocks[opno + 1]); + break; + case EEOP_LAST: Assert(false); break; diff --git a/src/backend/jit/llvm/llvmjit_types.c b/src/backend/jit/llvm/llvmjit_types.c index 1b5e06f60cc..f4f756e7cb5 100644 --- a/src/backend/jit/llvm/llvmjit_types.c +++ b/src/backend/jit/llvm/llvmjit_types.c @@ -187,4 +187,6 @@ void *referenced_functions[] = ExecBuildOuterBatchVector, ExecBuildScanBatchVector, ExecAggPlainTransBatch, + ExecQualBatchInitMask, + ExecQualBatchTerm, }; diff --git a/src/include/executor/execExpr.h b/src/include/executor/execExpr.h index f24782ecf58..f50936acaaa 100644 --- a/src/include/executor/execExpr.h +++ b/src/include/executor/execExpr.h @@ -306,6 +306,10 @@ typedef enum ExprEvalOp EEOP_AGG_PLAIN_TRANS_BATCH_ROWLOOP, /* per-row fmgr calls */ EEOP_AGG_PLAIN_TRANS_BATCH_DIRECT, /* call transfn once with AggBulkArgs */ + /* Batched qual evaluation */ + EEOP_QUAL_BATCH_INITMASK, + EEOP_QUAL_BATCH_TERM, + /* non-existent operation, used e.g. 
to check array lengths */ EEOP_LAST } ExprEvalOp; @@ -796,6 +800,21 @@ typedef struct ExprEvalStep { struct BatchVector *bv; } batch_vector; + + struct + { + struct BatchVector *bv; /* filled earlier by BUILD_BATCH_VECTOR */ + uint64 *mask; /* shared mask buffer for this program */ + int mask_words; /* ceil(es_max_batch/64) */ + } qualbatch_init; /* EEOP_QUAL_BATCH_INITMASK */ + + struct + { + struct BatchVector *bv; /* same bv as init */ + uint64 *mask; /* same mask buffer */ + int mask_words; /* same word count */ + struct BatchQualTerm *term; /* compiled leaf */ + } qualbatch_term; /* EEOP_QUAL_BATCH_TERM */ } d; } ExprEvalStep; @@ -975,4 +994,45 @@ extern void ExecBuildOuterBatchVector(ExprState *state, ExprEvalStep *op, ExprCo extern void ExecBuildScanBatchVector(ExprState *state, ExprEvalStep *op, ExprContext *econtext); extern void ExecAggPlainTransBatch(ExprState *state, ExprEvalStep *op, ExprContext *econtext); + +/* See ExecQualBatchTerm(). */ +typedef enum BatchQualTermKind +{ + BQTK_VAR_CONST, + BQTK_VAR_VAR, + BQTK_IS_NULL, + BQTK_IS_NOT_NULL, +} BatchQualTermKind; + +typedef struct BatchQualTerm +{ + BatchQualTermKind kind; + bool strict; /* follow strict NULL semantics if true */ + int16 l_off; /* left VAR column (index into BatchVector) */ + int16 r_off; /* right VAR column, or -1 if Const */ + Datum r_const; /* for VAR_CONST */ + bool r_isnull; /* for VAR_CONST */ + FmgrInfo *finfo; /* fmgr for generic binary ops */ + Oid collation; /* op collation */ +} BatchQualTerm; + +/* + * Runtime view for batched qual programs. + * Owned by the ExprState; lifetime == ExprState. 
+ */ +typedef struct BatchQualRuntime +{ + uint64 *mask; + int mask_words; +} BatchQualRuntime; + +static inline BatchQualRuntime * +ExecGetBatchQualRuntime(ExprState *batch_qual) +{ + return (BatchQualRuntime *) batch_qual->batch_private; +} + +extern void ExecQualBatchInitMask(ExprState *state, ExprEvalStep *op, ExprContext *econtext); +extern void ExecQualBatchTerm(ExprState *state, ExprEvalStep *op, ExprContext *econtext); + #endif /* EXEC_EXPR_H */ diff --git a/src/include/executor/execScan.h b/src/include/executor/execScan.h index fb4b57a831c..568a7a33b7d 100644 --- a/src/include/executor/execScan.h +++ b/src/include/executor/execScan.h @@ -304,7 +304,8 @@ ExecScanExtendedBatch(ScanState *node, { ExprContext *econtext = node->ps.ps_ExprContext; TupleBatch *b = node->ps.ps_Batch; - int qualified; + ExprState *qual_batch = node->ps.qual_batch; + int qualified = 0; /* Batch path does not support EPQ */ Assert(node->ps.state->es_epq_active == NULL); @@ -320,23 +321,31 @@ ExecScanExtendedBatch(ScanState *node, if (qual != NULL) { - qualified = 0; - while (TupleBatchHasMore(b)) + ResetExprContext(econtext); + if (qual_batch) { - TupleTableSlot *in = TupleBatchGetNextSlot(b); - - Assert(in); - ResetExprContext(econtext); - econtext->ecxt_scantuple = in; + qualified = ExecQualBatch(qual_batch, econtext, b); + } + else + { + int i = 0; - if (ExecQual(qual, econtext)) + while (TupleBatchHasMore(b)) { - TupleBatchStoreInOut(b, qualified, in); - qualified++; + TupleTableSlot *slot = TupleBatchGetNextSlot(b); + + Assert(slot); + econtext->ecxt_scantuple = slot; + if (ExecQual(qual, econtext)) + { + TupleBatchStoreInOut(b, qualified, slot); + qualified++; + } + i++; } - else - InstrCountFiltered1(node, 1); } + InstrCountFiltered1(node, b->nvalid - qualified); + /* Update count and start using b->outslots. 
*/ TupleBatchUseOutput(b, qualified); } else diff --git a/src/include/executor/executor.h b/src/include/executor/executor.h index c72bd755b79..dd0f2c74ae5 100644 --- a/src/include/executor/executor.h +++ b/src/include/executor/executor.h @@ -333,6 +333,7 @@ ExecProcNodeBatch(PlanState *node) extern ExprState *ExecInitExpr(Expr *node, PlanState *parent); extern ExprState *ExecInitExprWithParams(Expr *node, ParamListInfo ext_params); extern ExprState *ExecInitQual(List *qual, PlanState *parent); +extern ExprState *ExecInitQualBatch(PlanState *ps); extern ExprState *ExecInitCheck(List *qual, PlanState *parent); extern List *ExecInitExprList(List *nodes, PlanState *parent); extern ExprState *ExecBuildAggTrans(AggState *aggstate, struct AggStatePerPhaseData *phase, @@ -581,6 +582,8 @@ AggGetBulkArgs(FunctionCallInfo fcinfo) } #endif +extern int ExecQualBatch(ExprState *state, ExprContext *econtext, TupleBatch *b); + extern bool ExecCheck(ExprState *state, ExprContext *econtext); /* diff --git a/src/include/nodes/execnodes.h b/src/include/nodes/execnodes.h index fdfe8b4ddaf..78c5abbb23a 100644 --- a/src/include/nodes/execnodes.h +++ b/src/include/nodes/execnodes.h @@ -146,6 +146,9 @@ typedef struct ExprState * ExecInitExprRec(). */ ErrorSaveContext *escontext; + + /* batched-program runtime (e.g., BatchQualRuntime) */ + void *batch_private; } ExprState; @@ -1196,6 +1199,7 @@ typedef struct PlanState * subPlan list, which does not exist in the plan tree). */ ExprState *qual; /* boolean qual condition */ + ExprState *qual_batch; /* boolean qual condition evaluated on batches */ PlanState *lefttree; /* input plan tree(s) */ PlanState *righttree; -- 2.43.0