diff --git a/CHANGELOG.md b/CHANGELOG.md index d423fdc58..6d91a4513 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,9 @@ Thank you to all who have contributed! - Adds public `tag` field to IR nodes for associating metadata - Adds AST Normalization Pass. - Adds PartiQLPlanner Interface, which is responsible for translate an AST to a Plan. +- **EXPERIMENTAL** Evaluation of `EXCLUDE` in the `EvaluatingCompiler` + - This is currently marked as experimental until the RFC is approved https://github.com/partiql/partiql-lang/issues/27 + - This will be added to the `PhysicalPlanCompiler` in an upcoming release ### Changed - StaticTypeInferencer and PlanTyper will not raise an error when an expression is inferred to `NULL` or `unionOf(NULL, MISSING)`. In these cases the StaticTypeInferencer and PlanTyper will still raise the Problem Code `ExpressionAlwaysReturnsNullOrMissing` but the severity of the problem has been changed to warning. In the case an expression always returns `MISSING`, problem code `ExpressionAlwaysReturnsMissing` will be raised, which will have problem severity of error. diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/EvaluatingCompiler.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/EvaluatingCompiler.kt index c2a9a5c79..e2bffb6b4 100644 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/EvaluatingCompiler.kt +++ b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/EvaluatingCompiler.kt @@ -20,6 +20,7 @@ import com.amazon.ion.IonValue import com.amazon.ion.Timestamp import com.amazon.ion.system.IonSystemBuilder import com.amazon.ionelement.api.MetaContainer +import com.amazon.ionelement.api.emptyMetaContainer import com.amazon.ionelement.api.ionBool import com.amazon.ionelement.api.toIonValue import org.partiql.errors.ErrorCode @@ -1829,6 +1830,8 @@ internal open class EvaluatingCompiler( val limitThunk = selectExpr.limit?.let { compileAstExpr(it) } val limitLocationMeta = selectExpr.limit?.metas?.sourceLocation + val excludeExprs = selectExpr.excludeClause?.let { compileExcludeClause(it) } + fun rowsWithOffsetAndLimit(rows: Sequence, env: Environment): Sequence { val rowsWithOffset = when (offsetThunk) { null -> rows @@ -1863,7 +1866,16 @@ internal open class EvaluatingCompiler( else -> evalOrderBy(sourcedRows, orderByThunk, orderByLocationMeta) } - val projectedRows = orderedRows.map { (joinedValues, projectEnv) -> + val excludedBindings = when (excludeExprs) { + null -> orderedRows + else -> { + orderedRows.map { row -> + FromProduction(values = row.values, env = evalExclude(row.env, excludeExprs)) + } + } + } + + val projectedRows = excludedBindings.map { (joinedValues, projectEnv) -> selectProjectionThunk(projectEnv, joinedValues) } @@ -1929,8 +1941,17 @@ internal open class EvaluatingCompiler( else -> evalOrderBy(sourceThunks(env), orderByThunk, orderByLocationMeta) } + val excludedBindings = when (excludeExprs) { + null -> orderedRows + else -> { + orderedRows.map { row -> + FromProduction(values = row.values, env = evalExclude(row.env, excludeExprs)) + } + } + } + val fromProductions: Sequence = - rowsWithOffsetAndLimit(orderedRows, env) + rowsWithOffsetAndLimit(excludedBindings, env) val registers = createRegisterBank() // note: the group key can be anything here because we only ever have a single @@ -1977,9 +1998,6 @@ internal open class EvaluatingCompiler( val havingThunk = selectExpr.having?.let { compileHaving(it) } - val filterHavingAndProject: (Environment, Group) -> ExprValue? 
= - createFilterHavingAndProjectClosure(havingThunk, selectProjectionThunk) - val getGroupEnv: (Environment, Group) -> Environment = createGetGroupEnvClosure(groupAsName) @@ -2026,10 +2044,30 @@ internal open class EvaluatingCompiler( else -> evalOrderBy(groupByEnvValuePairs, orderByThunk, orderByLocationMeta) } + // apply HAVING row filter + val havingGroupEnvPairs = when (havingThunk) { + null -> orderedGroupEnvPairs + else -> { + orderedGroupEnvPairs.filter { (groupByEnv, _) -> + val havingClauseResult = havingThunk(groupByEnv) + havingClauseResult.isNotUnknown() && havingClauseResult.booleanValue() + } + } + } + + // apply EXCLUDE expressions + val excludedBindings = when (excludeExprs) { + null -> havingGroupEnvPairs + else -> { + havingGroupEnvPairs.map { (groupByEnv, currentGroup) -> + Pair(evalExclude(groupByEnv, excludeExprs), currentGroup) + } + } + } // generate the final group by projection - val projectedRows = orderedGroupEnvPairs.mapNotNull { (groupByEnv, groupValue) -> - filterHavingAndProject(groupByEnv, groupValue) - }.asSequence().let { rowsWithOffsetAndLimit(it, env) } + val projectedRows = excludedBindings.map { (groupByEnv, currentGroup) -> + selectProjectionThunk(groupByEnv, listOf(currentGroup.key)) + }.let { rowsWithOffsetAndLimit(it, env) } // if order by is specified, return list otherwise bag when (orderByThunk) { @@ -2067,7 +2105,15 @@ internal open class EvaluatingCompiler( val asThunk = compileAstExpr(asExpr) val atThunk = compileAstExpr(atExpr) thunkFactory.thunkEnv(metas) { env -> - val sourceValue = rowsWithOffsetAndLimit(sourceThunks(env).asSequence(), env) + val excludedBindings = when (excludeExprs) { + null -> sourceThunks(env) + else -> { + sourceThunks(env).map { row -> + FromProduction(values = row.values, env = evalExclude(row.env, excludeExprs)) + } + } + } + val sourceValue = rowsWithOffsetAndLimit(excludedBindings, env) val seq = sourceValue .map { (_, env) -> Pair(asThunk(env), atThunk(env)) } .filter { (name, _) -> name.type.isText } @@ -2159,6 +2205,174 @@ internal open class EvaluatingCompiler( CompiledGroupByItem(alias.text.exprValue(), uniqueName, compileAstExpr(it.expr)) } + /** + * Represents all the compiled `EXCLUDE` paths that start with the same [CompiledExcludeExpr.root]. Notably, + * redundant paths (i.e. exclude paths that exclude values already excluded by other paths) will be removed. + */ + internal data class CompiledExcludeExpr(val root: PartiqlAst.Identifier, val exclusions: RemoveAndOtherSteps) + + /** + * Represents all the exclusions at the current level and other nested levels. + * + * The idea behind this data structure is that at a current level (i.e. path step index), we keep track of the + * - Exclude paths that have a final exclude step at the current level. This set of tuple attributes and collection + * indexes to remove at the current level is modeled as a set of exclude steps (i.e. [RemoveAndOtherSteps.remove]). + * - Exclude paths that have additional steps (their final step is at a deeper level). This is modeled as a mapping + * of exclude steps to other [RemoveAndOtherSteps] to group all exclude paths that share the same current step. + * + * For example, let's say we have exclude paths (ignoring the exclude path root) of + * a.b, + * x.y.z1, + * x.y.z2 + * ^ ^ ^ + * Level 1 2 3 + * + * These exclude paths would be converted to the following in [RemoveAndOtherSteps]. 
+ * ``` + * // For demonstration purposes, the syntax '' corresponds to the exclude tuple attribute step of + * RemoveAndOtherSteps( // Level 1 (no exclusions at level 1) + * remove = emptySet(), + * steps = mapOf( + * 'a' to RemoveAndOtherSteps( // Level 2 for paths that have `'a'` in Level 1 + * remove = setOf('b'), // path `a.b` has final step at level 2 + * steps = emptyMap() + * ), + * 'x' to RemoveAndOtherSteps( // Level 2 for paths that have `'x'` in Level 1 + * remove = emptySet(), + * steps = mapOf( + * 'y' to RemoveAndOtherSteps( // Level 3 for paths that have `'y'` in Level 2 and `'x'` in Level 1 + * remove = setOf('z1', 'z2'), // paths `x.y.z1` and `x.y.z2` have final step at level 3 + * steps = emptyMap() + * ) + * ) + * ), + * ) + * ) + * ``` + */ + internal data class RemoveAndOtherSteps(val remove: Set, val steps: Map) { + companion object { + fun empty(): RemoveAndOtherSteps { + return RemoveAndOtherSteps(emptySet(), emptyMap()) + } + } + } + + private fun addToCompiledExcludeExprs(curCompiledExpr: RemoveAndOtherSteps, steps: List): RemoveAndOtherSteps { + // subsumption cases + // when steps.size == 1: possibly add to remove set + // when steps.size > 1: possibly add to steps map + val first = steps.first() + var entryRemove = curCompiledExpr.remove.toMutableSet() + var entrySteps = curCompiledExpr.steps.toMutableMap() + if (steps.size == 1) { + when (first) { + is PartiqlAst.ExcludeStep.ExcludeTupleAttr -> { + if (entryRemove.contains(PartiqlAst.build { excludeTupleWildcard() })) { + // contains wildcard; do not add; e.g. a.* and a.b -> keep a.* + } else { + // add to entries to remove + entryRemove.add(first) + // remove from other steps; e.g. a.b.c and a.b -> keep a.b + entrySteps.remove(first) + } + } + is PartiqlAst.ExcludeStep.ExcludeTupleWildcard -> { + entryRemove.add(first) + // remove all tuple attribute exclude steps + entryRemove = entryRemove.filterNot { + it is PartiqlAst.ExcludeStep.ExcludeTupleAttr + }.toMutableSet() + // remove all tuple attribute/wildcard exclude steps from deeper levels + entrySteps = entrySteps.filterNot { + it.key is PartiqlAst.ExcludeStep.ExcludeTupleAttr || it.key is PartiqlAst.ExcludeStep.ExcludeTupleWildcard + }.toMutableMap() + } + is PartiqlAst.ExcludeStep.ExcludeCollectionIndex -> { + if (entryRemove.contains(PartiqlAst.build { excludeCollectionWildcard() })) { + // contains wildcard; do not add; e.g a[*] and a[1] -> keep a[*] + } else { + // add to entries to remove + entryRemove.add(first) + // remove from other steps; e.g. a.b[2].c and a.b[2] -> keep a.b[2] + entrySteps.remove(first) + } + } + is PartiqlAst.ExcludeStep.ExcludeCollectionWildcard -> { + entryRemove.add(first) + // remove all collection index exclude steps + entryRemove = entryRemove.filterNot { + it is PartiqlAst.ExcludeStep.ExcludeCollectionIndex + }.toMutableSet() + // remove all collection index/wildcard exclude steps from deeper levels + entrySteps = entrySteps.filterNot { + it.key is PartiqlAst.ExcludeStep.ExcludeCollectionIndex || it.key is PartiqlAst.ExcludeStep.ExcludeCollectionWildcard + }.toMutableMap() + } + } + } else { + // remove at deeper level; need to check if first step is already removed in current step + when (first) { + is PartiqlAst.ExcludeStep.ExcludeTupleAttr -> { + if (entryRemove.contains(PartiqlAst.build { excludeTupleWildcard() }) || entryRemove.contains(first)) { + // remove set contains tuple wildcard or attr; do not add to other steps; + // e.g. 
a.* and a.b.c -> a.* + } else { + val existingEntry = entrySteps.getOrDefault(first, RemoveAndOtherSteps.empty()) + val newEntry = addToCompiledExcludeExprs(existingEntry, steps.drop(1)) + entrySteps[first] = newEntry + } + } + is PartiqlAst.ExcludeStep.ExcludeTupleWildcard -> { + if (entryRemove.any { it is PartiqlAst.ExcludeStep.ExcludeTupleWildcard }) { + // tuple wildcard at current level; do nothing + } else { + val existingEntry = entrySteps.getOrDefault(first, RemoveAndOtherSteps.empty()) + val newEntry = addToCompiledExcludeExprs(existingEntry, steps.drop(1)) + entrySteps[first] = newEntry + } + } + is PartiqlAst.ExcludeStep.ExcludeCollectionIndex -> { + if (entryRemove.contains(PartiqlAst.build { excludeCollectionWildcard() }) || entryRemove.contains(first)) { + // remove set contains collection wildcard or index; do not add to other steps; + // e.g. a[*] and a[*][1] -> a[*] + } else { + val existingEntry = entrySteps.getOrDefault(first, RemoveAndOtherSteps.empty()) + val newEntry = addToCompiledExcludeExprs(existingEntry, steps.drop(1)) + entrySteps[first] = newEntry + } + } + is PartiqlAst.ExcludeStep.ExcludeCollectionWildcard -> { + if (entryRemove.any { it is PartiqlAst.ExcludeStep.ExcludeCollectionWildcard }) { + // collection wildcard at current level; do nothing + } else { + val existingEntry = entrySteps.getOrDefault(first, RemoveAndOtherSteps.empty()) + val newEntry = addToCompiledExcludeExprs(existingEntry, steps.drop(1)) + entrySteps[first] = newEntry + } + } + } + } + return RemoveAndOtherSteps(entryRemove, entrySteps) + } + + /** + * Creates a list of compiled exclude expressions with each index of the resulting list corresponding to a different + * exclude path root. + */ + internal fun compileExcludeClause(excludeClause: PartiqlAst.ExcludeOp): List { + val excludeExprs = excludeClause.exprs + val compiledExcludeExprs = excludeExprs + .groupBy { it.root } + .map { (root, exclusions) -> + val compiledExclusions = exclusions.fold(RemoveAndOtherSteps.empty()) { acc, exclusion -> + addToCompiledExcludeExprs(acc, exclusion.steps) + } + CompiledExcludeExpr(root, compiledExclusions) + } + return compiledExcludeExprs + } + /** * Create a thunk that uses the compiled GROUP BY expressions to create the group key. */ @@ -2196,6 +2410,167 @@ internal open class EvaluatingCompiler( CompiledOrderByItem(comparator, compileAstExpr(it.expr)) } + /** + * Returns an [ExprValue] created from a sequence of [seq]. Requires [type] to be a sequence type + * (i.e. [ExprValueType.isSequence] == true). + */ + private fun newSequence(type: ExprValueType, seq: Sequence): ExprValue { + return when (type) { + ExprValueType.LIST -> ExprValue.newList(seq) + ExprValueType.BAG -> ExprValue.newBag(seq) + ExprValueType.SEXP -> ExprValue.newSexp(seq) + else -> error("Sequence type required") + } + } + + private fun excludeStructExprValue( + structExprValue: StructExprValue, + exclusions: RemoveAndOtherSteps + ): ExprValue { + val toRemove = exclusions.remove + val otherSteps = exclusions.steps + if (toRemove.any { it is PartiqlAst.ExcludeStep.ExcludeTupleWildcard }) { + // tuple wildcard at current level. 
return empty struct + return StructExprValue(sequence = emptySequence(), ordering = structExprValue.ordering) + } + val attrsToRemove = toRemove.filterIsInstance() + .map { it.attr.name.text } + .toSet() + val sequenceWithRemoved = structExprValue.mapNotNull { structField -> + if (attrsToRemove.contains(structField.name?.stringValue())) { + null + } else { + structField as NamedExprValue + } + } + val finalSequence = sequenceWithRemoved.map { structField -> + var expr = structField.value + val name = structField.name + // apply case-sensitive tuple attr exclusions + val structFieldCaseSensitiveKey = PartiqlAst.build { + excludeTupleAttr( + identifier( + name.stringValue(), + caseSensitive() + ) + ) + } + otherSteps[structFieldCaseSensitiveKey]?.let { + expr = excludeExprValue(expr, it) + } + // apply case-insensitive tuple attr exclusions + val structFieldCaseInsensitiveKey = PartiqlAst.build { + excludeTupleAttr( + identifier( + name.stringValue(), + caseInsensitive() + ) + ) + } + otherSteps[structFieldCaseInsensitiveKey]?.let { + expr = excludeExprValue(expr, it) + } + // apply tuple wildcard exclusions + val tupleWildcardKey = PartiqlAst.build { excludeTupleWildcard(emptyMetaContainer()) } + otherSteps[tupleWildcardKey]?.let { + expr = excludeExprValue(expr, it) + } + expr.namedValue(name) + } + return ExprValue.newStruct(values = finalSequence, ordering = structExprValue.ordering) + } + + private fun excludeCollectionExprValue( + initialExprValue: ExprValue, + exprValueType: ExprValueType, + exclusions: RemoveAndOtherSteps + ): ExprValue { + val toRemove = exclusions.remove + val otherSteps = exclusions.steps + if (toRemove.any { it is PartiqlAst.ExcludeStep.ExcludeCollectionWildcard }) { + // collection wildcard at current level. return empty collection + return newSequence(exprValueType, emptySequence()) + } else { + val indexesToRemove = toRemove.filterIsInstance() + .map { it.index.value } + .toSet() + val sequenceWithRemoved = initialExprValue.mapNotNull { element -> + if (indexesToRemove.contains(element.name?.longValue())) { + null + } else { + element + } + }.asSequence() + val finalSequence = sequenceWithRemoved.map { element -> + var expr = element + if (initialExprValue is ExprValue.Companion.ListExprValue || initialExprValue is ExprValue.Companion.SexpExprValue) { + element as NamedExprValue + val index = element.name.longValue() + // apply collection index exclusions for lists and sexps + val elementKey = PartiqlAst.build { + excludeCollectionIndex( + index + ) + } + otherSteps[elementKey]?.let { + expr = excludeExprValue(element.value, it) + } + } + // apply collection wildcard exclusions for lists, bags, and sexps + val collectionWildcardKey = PartiqlAst.build { excludeCollectionWildcard(emptyMetaContainer()) } + otherSteps[collectionWildcardKey]?.let { + expr = excludeExprValue(expr, it) + } + expr + } + return newSequence(exprValueType, finalSequence) + } + } + + private fun excludeExprValue(initialExprValue: ExprValue, exclusions: RemoveAndOtherSteps): ExprValue { + return when (initialExprValue) { + is NamedExprValue -> excludeExprValue(initialExprValue.value, exclusions) + is StructExprValue -> excludeStructExprValue(initialExprValue, exclusions) + is ExprValue.Companion.ListExprValue -> excludeCollectionExprValue(initialExprValue, ExprValueType.LIST, exclusions) + is ExprValue.Companion.BagExprValue -> excludeCollectionExprValue(initialExprValue, ExprValueType.BAG, exclusions) + is ExprValue.Companion.SexpExprValue -> excludeCollectionExprValue(initialExprValue, 
ExprValueType.SEXP, exclusions) + else -> { + initialExprValue + } + } + } + + private fun excludeBindings( + initialBindings: Bindings, + root: PartiqlAst.Identifier, + exclusions: RemoveAndOtherSteps + ): Bindings { + val bindingNameString = root.name.text + val bindingName = BindingName(bindingNameString, root.case.toBindingCase()) + val bindingAtAttr = initialBindings[bindingName] + return if (bindingAtAttr != null) { + val newBindings = Bindings.buildLazyBindings { + val newExprValue = excludeExprValue(bindingAtAttr, exclusions) + addBinding(bindingNameString) { + newExprValue + } + } + newBindings.delegate(initialBindings) + } else { + initialBindings + } + } + + private fun evalExclude( + env: Environment, + excludeExprs: List + ): Environment { + val newBindings = excludeExprs.fold(env.current) { accBindings, expr -> + excludeBindings(accBindings, expr.root, expr.exclusions) + } + return env.nest(newLocals = newBindings) + } + private fun evalOrderBy( rows: Sequence, orderByItems: List, @@ -2271,33 +2646,6 @@ internal open class EvaluatingCompiler( } } - /** - * Returns a closure which performs the final projection and returns the - * result. If a HAVING clause was included, a different closure is returned - * that evaluates the HAVING clause and performs filtering. - */ - private fun createFilterHavingAndProjectClosure( - havingThunk: ThunkEnv?, - selectProjectionThunk: ThunkEnvValue> - ): (Environment, Group) -> ExprValue? = - when { - havingThunk != null -> { groupByEnv, currentGroup -> - // Create a closure that executes the HAVING clause and returns null if the - // HAVING criteria is not met - val havingClauseResult = havingThunk(groupByEnv) - if (havingClauseResult.isNotUnknown() && havingClauseResult.booleanValue()) { - selectProjectionThunk(groupByEnv, listOf(currentGroup.key)) - } else { - null - } - } - else -> { groupByEnv, currentGroup -> - // Create a closure that simply performs the final projection and - // returns the result. 
- selectProjectionThunk(groupByEnv, listOf(currentGroup.key)) - } - } - private fun compileCallAgg(expr: PartiqlAst.Expr.CallAgg, metas: MetaContainer): ThunkEnv { if (metas.containsKey(IsCountStarMeta.TAG) && currentCompilationContext.expressionContext != ExpressionContext.SELECT_LIST) { err( diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValue.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValue.kt index 83b1d20a5..29021b7df 100644 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValue.kt +++ b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValue.kt @@ -161,19 +161,19 @@ interface ExprValue : Iterable, Faceted { override fun bytesValue() = value } - private class ListExprValue(val values: Sequence) : BaseExprValue() { + internal class ListExprValue(val values: Sequence) : BaseExprValue() { override val type = ExprValueType.LIST override val ordinalBindings by lazy { OrdinalBindings.ofList(toList()) } override fun iterator() = values.mapIndexed { i, v -> v.namedValue(newInt(i)) }.iterator() } - private class BagExprValue(val values: Sequence) : BaseExprValue() { + internal class BagExprValue(val values: Sequence) : BaseExprValue() { override val type = ExprValueType.BAG override val ordinalBindings = OrdinalBindings.EMPTY override fun iterator() = values.iterator() } - private class SexpExprValue(val values: Sequence) : BaseExprValue() { + internal class SexpExprValue(val values: Sequence) : BaseExprValue() { override val type = ExprValueType.SEXP override val ordinalBindings by lazy { OrdinalBindings.ofList(toList()) } override fun iterator() = values.mapIndexed { i, v -> v.namedValue(newInt(i)) }.iterator() diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValueExtensions.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValueExtensions.kt index 9b5ce3239..953fe6c92 100644 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValueExtensions.kt +++ b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/ExprValueExtensions.kt @@ -96,12 +96,7 @@ fun ExprValue.asNamed(): Named = object : Named { } /** Binds the given name value as a [Named] facet delegate over this [ExprValue]. */ -fun ExprValue.namedValue(nameValue: ExprValue): ExprValue = object : ExprValue by this, Named { - override val name = nameValue - override fun asFacet(type: Class?): T? = - downcast(type) ?: this@namedValue.asFacet(type) - override fun toString(): String = stringify() -} +fun ExprValue.namedValue(nameValue: ExprValue): ExprValue = NamedExprValue(nameValue, this) /** Wraps this [ExprValue] in a delegate that always masks the [Named] facet. */ fun ExprValue.unnamedValue(): ExprValue = when (asFacet(Named::class.java)) { diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/Named.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/Named.kt index 2122f6873..2ea4199dd 100644 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/Named.kt +++ b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/Named.kt @@ -14,6 +14,8 @@ package org.partiql.lang.eval +import org.partiql.lang.util.downcast + /** * Facet for a value to indicate that it either has a name within some context * or an ordinal position. @@ -27,3 +29,12 @@ interface Named { */ val name: ExprValue } + +/** + * An [ExprValue] that also implements [Named]. + */ +internal class NamedExprValue(override val name: ExprValue, val value: ExprValue) : ExprValue by value, Named { + override fun asFacet(type: Class?): T? 
= downcast(type) ?: value.asFacet(type) + + override fun toString(): String = stringify() +} diff --git a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/StructExprValue.kt b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/StructExprValue.kt index 75eccc52f..bc0b78369 100644 --- a/partiql-lang/src/main/kotlin/org/partiql/lang/eval/StructExprValue.kt +++ b/partiql-lang/src/main/kotlin/org/partiql/lang/eval/StructExprValue.kt @@ -26,7 +26,7 @@ enum class StructOrdering { * Provides a [ExprValueType.STRUCT] implementation lazily backed by a sequence. */ internal open class StructExprValue( - private val ordering: StructOrdering, + internal val ordering: StructOrdering, private val sequence: Sequence ) : BaseExprValue() { diff --git a/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerExcludeTests.kt b/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerExcludeTests.kt new file mode 100644 index 000000000..3ab41e136 --- /dev/null +++ b/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerExcludeTests.kt @@ -0,0 +1,1077 @@ +package org.partiql.lang.eval + +import com.amazon.ionelement.api.emptyMetaContainer +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ArgumentsSource +import org.partiql.lang.domains.PartiqlAst +import org.partiql.lang.eval.evaluatortestframework.CompilerPipelineFactory +import org.partiql.lang.eval.evaluatortestframework.EvaluatorTestAdapter +import org.partiql.lang.eval.evaluatortestframework.EvaluatorTestCase +import org.partiql.lang.eval.evaluatortestframework.PipelineEvaluatorTestAdapter +import org.partiql.lang.syntax.PartiQLParserBuilder +import org.partiql.lang.util.ArgumentsProviderBase +import org.partiql.pig.runtime.LongPrimitive + +class EvaluatingCompilerExcludeTests : EvaluatorTestBase() { + + private val testHarness: EvaluatorTestAdapter = PipelineEvaluatorTestAdapter(CompilerPipelineFactory()) + + class ExcludeTests : ArgumentsProviderBase() { + override fun getParameters(): List = listOf( + EvaluatorTestCase( + "SELECT t.* EXCLUDE t.a FROM <<{'a': {'b': 2}, 'foo': 'bar', 'foo2': 'bar2'}>> AS t", + """<<{'foo': 'bar', 'foo2': 'bar2'}>>""" + ), + EvaluatorTestCase( // EXCLUDE tuple attr using bracket syntax; same output as above + "SELECT t.* EXCLUDE t['a'] FROM <<{'a': {'b': 2}, 'foo': 'bar', 'foo2': 'bar2'}>> AS t", + """<<{'foo': 'bar', 'foo2': 'bar2'}>>""" + ), + EvaluatorTestCase( // multiple binding tuples; select star + """ + SELECT * + EXCLUDE t.a FROM + << + {'a': {'b': 1}, 'foo': 'bar', 'foo2': 'bar1'}, + {'a': {'b': 2}, 'foo': 'bar', 'foo2': 'bar2'}, + {'a': {'b': 3}, 'foo': 'bar', 'foo2': 'bar3'} + >> AS t + """.trimIndent(), + """<< + {'foo': 'bar', 'foo2': 'bar1'}, + {'foo': 'bar', 'foo2': 'bar2'}, + {'foo': 'bar', 'foo2': 'bar3'} + >>""".trimMargin() + ), + EvaluatorTestCase( // multiple binding tuples; select list + """ + SELECT t.* + EXCLUDE t.a FROM + << + {'a': {'b': 1}, 'foo': 'bar', 'foo2': 'bar1'}, + {'a': {'b': 2}, 'foo': 'bar', 'foo2': 'bar2'}, + {'a': {'b': 3}, 'foo': 'bar', 'foo2': 'bar3'} + >> AS t + """.trimIndent(), + """<< + {'foo': 'bar', 'foo2': 'bar1'}, + {'foo': 'bar', 'foo2': 'bar2'}, + {'foo': 'bar', 'foo2': 'bar3'} + >>""".trimMargin() + ), + EvaluatorTestCase( // multiple binding tuples; + """ + SELECT VALUE t + EXCLUDE t.a FROM + << + {'a': {'b': 1}, 'foo': 'bar', 'foo2': 'bar1'}, + {'a': {'b': 2}, 'foo': 'bar', 'foo2': 'bar2'}, + {'a': {'b': 3}, 'foo': 'bar', 'foo2': 'bar3'} + >> AS t + """.trimIndent(), + """<< + {'foo': 
'bar', 'foo2': 'bar1'}, + {'foo': 'bar', 'foo2': 'bar2'}, + {'foo': 'bar', 'foo2': 'bar3'} + >>""".trimMargin() + ), + EvaluatorTestCase( // EXCLUDE deeper nested field; no fields remaining + "SELECT t.* EXCLUDE t.a.b FROM <<{'a': {'b': 2}, 'foo': 'bar', 'foo2': 'bar2'}>> AS t", + """<<{'a': {}, 'foo': 'bar', 'foo2': 'bar2'}>>""" + ), + EvaluatorTestCase( // EXCLUDE deeper nested field; other field remaining + "SELECT t.* EXCLUDE t.a.b FROM <<{'a': {'b': 2, 'c': 3}, 'foo': 'bar', 'foo2': 'bar2'}>> AS t", + """<<{'a': {'c': 3}, 'foo': 'bar', 'foo2': 'bar2'}>>""" + ), + EvaluatorTestCase( // EXCLUDE multiple nested paths + "SELECT t.* EXCLUDE t.a.c, t.a.d, t.foo FROM <<{'a': {'b': 2, 'c': 3, 'd': 4}, 'foo': 'bar', 'foo2': 'bar2'}>> AS t", + """<<{'a': {'b': 2}, 'foo2': 'bar2'}>>""" + ), + EvaluatorTestCase( // EXCLUDE overlapping paths + """ + SELECT t.* + EXCLUDE t.a.c, t.a -- `t.a` and `t.a.c` overlap; still exclude `t.a` + FROM <<{'a': {'b': 2, 'c': 3, 'd': 4}, 'foo': 'bar', 'foo2': 'bar2'}>> AS t + """.trimIndent(), + """<<{'foo': 'bar', 'foo2': 'bar2'}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star + """SELECT * EXCLUDE c.ssn FROM [ + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' + }, + 'ssn': 123456789 + } + ] AS c + """.trimIndent(), + """ + << + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' + } + } + >>""" + ), + EvaluatorTestCase( // EXCLUDE select star with FROM source list + """SELECT * EXCLUDE c.ssn FROM [ + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' + }, + 'ssn': 123456789 + } + ] AS c + """.trimIndent(), + """ + << + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' + } + } + >>""" + ), + EvaluatorTestCase( // EXCLUDE select star with multiple paths and FROM source list + """ + SELECT * EXCLUDE c.ssn, c.address.street FROM [ + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109, + 'street': '123 Seaplane Dr.' 
+ }, + 'ssn': 123456789 + } + ] AS c + """.trimIndent(), + """ + << + { + 'name': 'Alan', + 'custId': 1, + 'address': { + 'city': 'Seattle', + 'zipcode': 98109 + } + } + >>""" + ), + EvaluatorTestCase( // EXCLUDE select star list index and list index field + """ + SELECT * + EXCLUDE + t.a.b.c[0], + t.a.b.c[1].field + FROM [{ + 'a': { + 'b': { + 'c': [ + { + 'field': 0, -- c[0]; entire struct to be excluded + 'index': 0 + }, + { + 'field': 1, -- c[1]; `field` to be excluded + 'index': 1 + }, + { + 'field': 2, -- c[2]; field unchanged + 'index': 2 + } + ] + } + }, + 'foo': 'bar' + }] AS t + """, + """<<{'a': {'b': {'c': [{'index': 1}, {'field': 2, 'index': 2}]}}, 'foo': 'bar'}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection index as last step + """ + SELECT * + EXCLUDE + t.a.b.c[0] + FROM [{ + 'a': { + 'b': { + 'c': [0, 1, 2] + } + }, + 'foo': 'bar' + }] AS t + """, + """<<{'a': {'b': {'c': [1, 2]}}, 'foo': 'bar'}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection wildcard as last step on list + """ + SELECT * + EXCLUDE + t.a[*] + FROM [{ + 'a': [0, 1, 2] + }] AS t + """, + """<<{'a': []}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection wildcard and tuple path on list + """ + SELECT * + EXCLUDE + t.a.b.c[*].field_x + FROM [{ + 'a': { + 'b': { + 'c': [ + { -- c[0]; field_x to be removed + 'field_x': 0, + 'field_y': 0 + }, + { -- c[1]; field_x to be removed + 'field_x': 1, + 'field_y': 1 + }, + { -- c[2]; field_x to be removed + 'field_x': 2, + 'field_y': 2 + } + ] + } + }, + 'foo': 'bar' + }] AS t + """, + """<<{'a': {'b': {'c': [{'field_y': 0}, {'field_y': 1}, {'field_y': 2}]}}, 'foo': 'bar'}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star tuple wildcard as final step + """ + SELECT * + EXCLUDE + t.a.b.c[*].* + FROM [{ + 'a': { + 'b': { + 'c': [ + { -- c[0] + 'field_x': 0, + 'field_y': 0 + }, + { -- c[1] + 'field_x': 1, + 'field_y': 1 + }, + { -- c[2] + 'field_x': 2, + 'field_y': 2 + } + ] + } + }, + 'foo': 'bar' + }] AS t + """, + """<<{'a': {'b': {'c': [{}, {}, {}]}}, 'foo': 'bar'}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star order by + """ + SELECT * + EXCLUDE + t.a + FROM [ + { + 'a': 2, + 'foo': 'bar2' + }, + { + 'a': 1, + 'foo': 'bar1' + }, + { + 'a': 3, + 'foo': 'bar3' + } + ] AS t + ORDER BY t.a + """, + """[{'foo': 'bar1'}, {'foo': 'bar2'}, {'foo': 'bar3'}]""" + ), + EvaluatorTestCase( // EXCLUDE select star with JOIN + """ + SELECT * + EXCLUDE bar.d + FROM + << + {'a': 1, 'b': 11}, + {'a': 2, 'b': 22} + >> AS foo, + << + {'c': 3, 'd': 33}, + {'c': 4, 'd': 44} + >> AS bar + """, + """<<{'a': 1, 'b': 11, 'c': 3}, {'a': 1, 'b': 11, 'c': 4}, {'a': 2, 'b': 22, 'c': 3}, {'a': 2, 'b': 22, 'c': 4}>>""" + ), + EvaluatorTestCase( // EXCLUDE select list with multiple fields in FROM source struct + """ + SELECT t.b EXCLUDE t.b[*].b_1 + FROM << + { + 'a': {'a_1':1,'a_2':2}, + 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], -- every `b_1` to be excluded + 'c': 7, + 'd': 8 + } >> AS t + """, + """<<{'b': [{'b_2': 4}, {'b_2': 6}]}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star with multiple fields in FROM source struct + """ + SELECT * EXCLUDE t.b[*].b_1 + FROM << + { + 'a': {'a_1':1,'a_2':2}, + 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], -- every `b_1` to be excluded + 'c': 7, + 'd': 8 + } >> AS t + """, + """<<{'a': {'a_1': 1, 'a_2': 2}, 'b': [{'b_2': 4}, {'b_2': 6}], 'c': 7, 'd': 8}>>""" + ), + EvaluatorTestCase( // EXCLUDE select value with multiple fields in FROM source struct + """ + SELECT VALUE t.b EXCLUDE t.b[*].b_1 + 
FROM << + { + 'a': {'a_1':1,'a_2':2}, + 'b': [ {'b_1':3,'b_2':4}, {'b_1':5,'b_2':6} ], + 'c': 7, + 'd': 8 + } >> AS t + """, + """<<[{'b_2': 4}, {'b_2': 6}]>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection wildcard and nested tuple attr + """ + SELECT * EXCLUDE t.a[*].b.c + FROM << + { + 'a': [ -- `c` attr to be excluded from each element of `a` + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t + """, + """<<{'a': [{'b': {'d': 'zero'}}, {'b': {'d': 'one'}}, {'b': {'d': 'two'}}]}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection index and nested tuple attr + """ + SELECT * EXCLUDE t.a[1].b.c + FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, -- exclude `c` from just this index + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t + """, + """<<{'a': [{'b': {'c': 0, 'd': 'zero'}}, {'b': {'d': 'one'}}, {'b': {'c': 2, 'd': 'two'}}]}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection wildcard and nested tuple wildcard + """ + SELECT * EXCLUDE t.a[*].b.* + FROM << + { + 'a': [ -- exclude all of `b`'s attrs from each element of `a` + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t + """, + """<<{'a': [{'b': {}}, {'b': {}}, {'b': {}}]}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection index and nested tuple wildcard + """ + SELECT * EXCLUDE t.a[1].b.* + FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': 'zero' } }, + { 'b': { 'c': 1, 'd': 'one' } }, -- exclude all of `b`'s attrs from just this index + { 'b': { 'c': 2, 'd': 'two' } } + ] + } + >> AS t + """, + """<<{'a': [{'b': {'c': 0, 'd': 'zero'}}, {'b': {}}, {'b': {'c': 2, 'd': 'two'}}]}>>""" + ), + EvaluatorTestCase( // EXCLUDE select star collection wildcard and nested collection wildcard + """ + SELECT * EXCLUDE t.a[*].b.d[*].e + FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': [{'e': 'zero0', 'f': true}, {'e': 'zero1', 'f': false}] } }, -- all `e` to be excluded + { 'b': { 'c': 1, 'd': [{'e': 'one0', 'f': true}, {'e': 'one1', 'f': false}] } }, -- all `e` to be excluded + { 'b': { 'c': 2, 'd': [{'e': 'two0', 'f': true}, {'e': 'two1', 'f': false}] } } -- all `e` to be excluded + ] + } + >> AS t + """, + """ + << + { + 'a': [ + { 'b': { 'c': 0, 'd': [ { 'f': true }, { 'f': false } ] } }, + { 'b': { 'c': 1, 'd': [ { 'f': true }, { 'f': false } ] } }, + { 'b': { 'c': 2, 'd': [ { 'f': true }, { 'f': false } ] } } + ] + } + >> + """ + ), + EvaluatorTestCase( // EXCLUDE select star collection index and nested collection wildcard + """ + SELECT * EXCLUDE t.a[1].b.d[*].e + FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': [{'e': 'zero0', 'f': true}, {'e': 'zero1', 'f': false}] } }, + { 'b': { 'c': 1, 'd': [{'e': 'one0', 'f': true}, {'e': 'one1', 'f': false}] } }, -- only `e` from this index to be excluded + { 'b': { 'c': 2, 'd': [{'e': 'two0', 'f': true}, {'e': 'two1', 'f': false}] } } + ] + } + >> AS t + """, + """ + << + { + 'a': [ + { 'b': { 'c': 0, 'd': [ { 'e': 'zero0', 'f': true }, { 'e': 'zero1', 'f': false } ] } }, + { 'b': { 'c': 1, 'd': [ { 'f': true }, { 'f': false } ] } }, + { 'b': { 'c': 2, 'd': [ { 'e': 'two0', 'f': true }, { 'e': 'two1', 'f': false } ] } } + ] + } + >> + """ + ), + EvaluatorTestCase( // EXCLUDE select star collection index and nested collection index + """ + SELECT * EXCLUDE t.a[1].b.d[0].e + FROM << + { + 'a': [ + { 'b': { 'c': 0, 'd': [{'e': 'zero0', 'f': true}, {'e': 'zero1', 'f': false}] 
} }, + { 'b': { 'c': 1, 'd': [{'e': 'one0', 'f': true}, {'e': 'one1', 'f': false}] } }, -- `e` from 0-th index of `d` to be excluded + { 'b': { 'c': 2, 'd': [{'e': 'two0', 'f': true}, {'e': 'two1', 'f': false}] } } + ] + } + >> AS t + """, + """ + << + { + 'a': [ + { 'b': { 'c': 0, 'd': [ { 'e': 'zero0', 'f': true }, { 'e': 'zero1', 'f': false } ] } }, + { 'b': { 'c': 1, 'd': [ { 'f': true }, { 'e': 'one1', 'f': false } ] } }, + { 'b': { 'c': 2, 'd': [ { 'e': 'two0', 'f': true }, { 'e': 'two1', 'f': false } ] } } + ] + } + >> + """ + ), + EvaluatorTestCase( // EXCLUDE select star tuple wildcard and subsequent tuple attr + """ + SELECT * + EXCLUDE + t.a.*.bar + FROM [ + { + 'a': { + 'b': { 'foo': 1, 'bar': 2 }, + 'c': { 'foo': 11, 'bar': 22 }, + 'd': { 'foo': 111, 'bar': 222 } + }, + 'foo': 'bar' + } + ] AS t + """, + """ + << + { + 'a': { + 'b': { 'foo': 1 }, + 'c': { 'foo': 11 }, + 'd': { 'foo': 111 } + }, + 'foo': 'bar' + } + >> + """ + ), + EvaluatorTestCase( // EXCLUDE select star with ORDER BY + """ + SELECT * + EXCLUDE + t.a + FROM [ + { + 'a': 2, + 'foo': 'bar2' + }, + { + 'a': 1, + 'foo': 'bar1' + }, + { + 'a': 3, + 'foo': 'bar3' + } + ] AS t + ORDER BY t.a + """, + """ + [ + { + 'foo': 'bar1' + }, + { + 'foo': 'bar2' + }, + { + 'foo': 'bar3' + } + ] + """ + ), + EvaluatorTestCase( // exclude select star with GROUP BY + """ + SELECT * + EXCLUDE g[*].t.c + FROM + << + { 'a': 1, 'b': 11, 'c': 111 }, + { 'a': 1, 'b': 22, 'c': 222 }, + { 'a': 2, 'b': 33, 'c': 333 } + >> AS t + GROUP BY t.a AS a GROUP AS g + """, + """ + << + { + 'a': 1, + 'g': << + { 't': { 'a': 1, 'b': 11 } }, + { 't': { 'a': 1, 'b': 22 } } + >> + }, + { + 'a': 2, + 'g': << + { 't': { 'a': 2, 'b': 33 } } + >> + } + >> + """ + ), + EvaluatorTestCase( // EXCLUDE select star with DISTINCT + """ + SELECT DISTINCT * + EXCLUDE t.a + FROM + << + { 'a': 1, 'b': 11, 'c': 111 }, + { 'a': 2, 'b': 11, 'c': 111 }, -- `b` and `c` same as above; `a` is different but will be excluded + { 'a': 1, 'b': 22, 'c': 222 } -- `b` and `c` different from above two rows; will be kept + >> AS t + """, + """ + << + {'b': 11, 'c': 111}, + {'b': 22, 'c': 222} + >> + """ + ), + EvaluatorTestCase( // EXCLUDE select star with ORDER BY, LIMIT, OFFSET + """ + SELECT DISTINCT * + EXCLUDE t.a + FROM + << + { 'a': 1, 'b': 11, 'c': 111 }, + { 'a': 2, 'b': 22, 'c': 222 }, + { 'a': 3, 'b': 33, 'c': 333 }, -- kept + { 'a': 4, 'b': 44, 'c': 444 }, -- kept + { 'a': 5, 'b': 55, 'c': 555 } + >> AS t + ORDER BY a + LIMIT 2 + OFFSET 2 + """, + """ + [ + {'b': 33, 'c': 333}, + {'b': 44, 'c': 444} + ] + """ + ), + EvaluatorTestCase( // EXCLUDE PIVOT + """ + PIVOT t.v AT t.attr + EXCLUDE t.v[*].excludeValue + FROM + << + { 'attr': 'a', 'v': [{'keepValue': 1, 'excludeValue': 11}, {'keepValue': 4, 'excludeValue': 44}] }, + { 'attr': 'b', 'v': [{'keepValue': 2, 'excludeValue': 22}, {'keepValue': 5, 'excludeValue': 55}] }, + { 'attr': 'c', 'v': [{'keepValue': 3, 'excludeValue': 33}, {'keepValue': 6, 'excludeValue': 66}] } + >> AS t + """, + """ + { + 'a': [{'keepValue': 1}, {'keepValue': 4}], + 'b': [{'keepValue': 2}, {'keepValue': 5}], + 'c': [{'keepValue': 3}, {'keepValue': 6}] + } + """ + ), + EvaluatorTestCase( // EXCLUDE UNPIVOT + """ + SELECT v, attr + EXCLUDE v.foo + FROM UNPIVOT + { + 'a': {'foo': 1, 'bar': 11}, + 'a': {'foo': 2, 'bar': 22}, + 'b': {'foo': 3, 'bar': 33} + } AS v AT attr + """, + """ + << + {'v': {'bar': 11}, 'attr': 'a'}, + {'v': {'bar': 22}, 'attr': 'a'}, + {'v': {'bar': 33}, 'attr': 'b'} + >> + """ + ), + EvaluatorTestCase( // EXCLUDE collection 
index on list + """ + SELECT * + EXCLUDE t.a[1] + FROM + << + {'a': [0, 1, 2]} -- index `1` to be excluded + >> AS t + """, + """<<{'a': [0, 2]}>>""" + ), + EvaluatorTestCase( // EXCLUDE collection index on bag -- nothing gets excluded; no error + """ + SELECT * + EXCLUDE t.a[1] + FROM + << + {'a': <<0, 1, 2>>} + >> AS t + """, + """<<{'a': <<0, 1, 2>>}>>""" + ), + EvaluatorTestCase( // EXCLUDE collection index on sexp + """ + SELECT * + EXCLUDE t.a[1] + FROM + << + {'a': `(0 1 2)`} -- index `1` to be excluded + >> AS t + """, + """<<{'a': `(0 2)`}>>""" + ), + EvaluatorTestCase( // EXCLUDE collection wildcard on list + """ + SELECT * + EXCLUDE t.a[*] + FROM + << + {'a': [0, 1, 2]} -- all indexes to be excluded; empty list as result + >> AS t + """, + """<<{'a': []}>>""" + ), + EvaluatorTestCase( // EXCLUDE collection wildcard on bag + """ + SELECT * + EXCLUDE t.a[*] + FROM + << + {'a': <<0, 1, 2>>} -- all indexes to be excluded; empty bag as result + >> AS t + """, + """<<{'a': <<>>}>>""" + ), + EvaluatorTestCase( // EXCLUDE collection wildcard on sexp + """ + SELECT * + EXCLUDE t.a[*] + FROM + << + {'a': `(0 1 2)`} -- all indexes to be excluded; empty sexp as result + >> AS t + """, + """<<{'a': `()`}>>""" + ), + EvaluatorTestCase( // EXCLUDE with duplicates + """ + SELECT * + EXCLUDE t.a + FROM + << + { + 'a': 1, -- to be excluded + 'a': 2, -- to be excluded + 'a': 3, -- to be excluded + 'b': 4, + 'c': 5 + } + >> AS t + """, + """<<{'b': 4, 'c': 5}>>""" + ), + EvaluatorTestCase( // EXCLUDE with non-existent paths; no error + """ + SELECT * + EXCLUDE t.path_does_not_exist, t.path_does_not_exist.*.foo, t.a['does not exist'], t.a + FROM + <<{'a': 1, 'b': 2}>> AS t -- only exclude `a` + """, + """<<{'b': 2}>>""" + ), + EvaluatorTestCase( // EXCLUDE with different FROM source bindings + """ + SELECT * + EXCLUDE t.a[*].bar, t.a.bar, t.a.*.bar -- EXCLUDE all `bar` + FROM + << + {'a': [{'foo': 0, 'bar': 1, 'baz': 2}, {'foo': 3, 'bar': 4, 'baz': 5}]}, + {'a': {'foo': 6, 'bar': 7, 'baz': 8}}, + {'a': {'a1': {'foo': 9, 'bar': 10, 'baz': 11}, 'a2': {'foo': 12, 'bar': 13, 'baz': 14}}} + >> AS t + """, + """ + << + {'a': [{'foo': 0, 'baz': 2}, {'foo': 3, 'baz': 5}]}, + {'a': {'foo': 6, 'baz': 8}}, + {'a': {'a1': {'foo': 9, 'baz': 11}, 'a2': {'foo': 12, 'baz': 14}}} + >> + """ + ), + EvaluatorTestCase( // EXCLUDE with GROUP BY, HAVING, and aggregation + """ + SELECT SUM(t.b) AS total, g + EXCLUDE g[*].t.c -- EXCLUDE + FROM + [ + {'a': 1, 'b': 2, 'c': 3}, -- group `t.a` = 1 + {'a': 1, 'b': 22, 'c': 33}, -- group `t.a` = 1 + {'a': 2, 'b': 222, 'c': 333} -- row/group omitted due to `HAVING` clause + ] AS t + GROUP BY t.a GROUP AS g + HAVING COUNT(t.a) > 1 + """, + """ + << + { + 'total': 24, -- `total` from row 1's `a` (2) + row 2's `a` (22) = 24 + 'g': <<{'t': {'a': 1, 'b': 2}}, {'t': {'a': 1, 'b': 22}}>> -- `EXCLUDE`s `c` + } + >> + """ + ), + ) + } + + @ParameterizedTest + @ArgumentsSource(ExcludeTests::class) + fun validExcludeTests(tc: EvaluatorTestCase) = testHarness.runEvaluatorTestCase( + tc, + EvaluationSession.standard() + ) + + private fun testExcludeExprSubsumption(tc: SubsumptionTC) { + val parser = PartiQLParserBuilder.standard().build() + val parsedSFW = parser.parseAstStatement("SELECT * EXCLUDE ${tc.excludeExprStr} FROM t") + val exclude = (((parsedSFW as PartiqlAst.Statement.Query).expr) as PartiqlAst.Expr.Select).excludeClause!! 
+ val eC = EvaluatingCompiler( + emptyList(), + emptyMap(), + emptyMap(), + ) + val actualExcludeExprs = eC.compileExcludeClause(exclude) + assertEquals(tc.expectedExcludeExprs, actualExcludeExprs) + } + + internal data class SubsumptionTC(val excludeExprStr: String, val expectedExcludeExprs: List) + + @ParameterizedTest + @ArgumentsSource(ExcludeSubsumptionTests::class) + internal fun subsumptionTests(tc: SubsumptionTC) = testExcludeExprSubsumption(tc) + + internal class ExcludeSubsumptionTests : ArgumentsProviderBase() { + private fun caseSensitiveId(id: String): PartiqlAst.Identifier { + return PartiqlAst.build { identifier(name = id, case = caseSensitive(emptyMetaContainer())) } + } + private fun caseInsensitiveId(id: String): PartiqlAst.Identifier { + return PartiqlAst.build { identifier(name = id, case = caseInsensitive(emptyMetaContainer())) } + } + private fun exTupleAttr(id: PartiqlAst.Identifier): PartiqlAst.ExcludeStep { + return PartiqlAst.ExcludeStep.ExcludeTupleAttr(id) + } + private fun exTupleWildcard(): PartiqlAst.ExcludeStep { + return PartiqlAst.ExcludeStep.ExcludeTupleWildcard() + } + private fun exCollIndex(i: Int): PartiqlAst.ExcludeStep { + return PartiqlAst.ExcludeStep.ExcludeCollectionIndex(index = LongPrimitive(i.toLong(), emptyMetaContainer())) + } + private fun exCollWildcard(): PartiqlAst.ExcludeStep { + return PartiqlAst.ExcludeStep.ExcludeCollectionWildcard() + } + + override fun getParameters(): List = listOf( + SubsumptionTC( + "s.a, t.a, t.b, s.b", + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("s"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("b"))), + steps = emptyMap(), + ) + ), + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("b"))), + steps = emptyMap(), + ) + ) + ) + ), + SubsumptionTC( + "t.a, t.b", + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("b"))), + steps = emptyMap(), + ) + ) + ) + ), + SubsumptionTC( + "t.a, t.b, t.a, t.b, t.b", // duplicates subsumed + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("b"))), + steps = emptyMap(), + ) + ) + ) + ), + SubsumptionTC( + "t.a, t.b, t.*", // tuple wildcard subsumes tuple attr + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleWildcard()), + steps = emptyMap(), + ) + ) + ) + ), + SubsumptionTC( // removal at earlier step subsumes + """ + t.a, t.a.a1, -- t.a.a1 subsumed + t.b.b1.b2, t.b.b1, -- t.b.b1.b2 subsumed + t.c, t.c.c1[2].c3[*].* -- t.c.c1[2].c3[*].* subsumed + """, + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("c"))), + steps = mapOf( + exTupleAttr(caseInsensitiveId("b")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("b1"))), + steps 
= emptyMap() + ) + ), + ) + ) + ) + ), + SubsumptionTC( // exclude collection index + """ + t.a, t.a[1], + t.b[1], t.b + """, + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("b"))), + steps = emptyMap() + ) + ) + ) + ), + SubsumptionTC( // exclude collection index, collection wildcard + """ + t.a[*], t.a[1], + t.b[1], t.b[*], + t.c[*], t.c[1].c1, + t.d[1].d1, t.d[*] + """, + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = emptySet(), + steps = mapOf( + exTupleAttr(caseInsensitiveId("a")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("b")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("c")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("d")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + ) + ) + ) + ) + ), + SubsumptionTC( + """ + t.a[1].a1, t.a[1], + t.b[1], t.b[1].b1, + t.c[*], t.c[*].c1, + t.d[*].d1, t.d[*], + t.e[1], t.e[*].e1, -- keep both + t.f[*].f1, t.f[1], -- keep both + t.g[*], t.g[1].e1, + t.h[1].f1, t.h[*] + """, + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = emptySet(), + steps = mapOf( + exTupleAttr(caseInsensitiveId("a")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollIndex(1)), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("b")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollIndex(1)), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("c")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("d")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("e")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollIndex(1)), + steps = mapOf( + exCollWildcard() to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("e1"))), + steps = emptyMap(), + ) + ) + ), + exTupleAttr(caseInsensitiveId("f")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollIndex(1)), + steps = mapOf( + exCollWildcard() to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("f1"))), + steps = emptyMap(), + ) + ) + ), + exTupleAttr(caseInsensitiveId("g")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + exTupleAttr(caseInsensitiveId("h")) to EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exCollWildcard()), + steps = emptyMap() + ), + ) + ) + ) + ) + ), + SubsumptionTC( // case sensitive + """ + t.a, "t".a, -- "t".a in case-sensitive list + "t".b, t.b, -- "t".b in case-sensitive list + t."c", t.c, + t.d, t."d" + """, + listOf( + EvaluatingCompiler.CompiledExcludeExpr( + root = caseInsensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), 
exTupleAttr(caseInsensitiveId("b")), exTupleAttr(caseInsensitiveId("c")), exTupleAttr(caseInsensitiveId("d")), exTupleAttr(caseSensitiveId("c")), exTupleAttr(caseSensitiveId("d"))), + steps = emptyMap(), + ) + ), + EvaluatingCompiler.CompiledExcludeExpr( + root = caseSensitiveId("t"), + exclusions = EvaluatingCompiler.RemoveAndOtherSteps( + remove = setOf(exTupleAttr(caseInsensitiveId("a")), exTupleAttr(caseInsensitiveId("b"))), + steps = emptyMap(), + ) + ), + ) + ), + ) + } +} diff --git a/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt b/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt index 7a95ed7fa..4e31fa321 100644 --- a/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt +++ b/partiql-lang/src/test/kotlin/org/partiql/lang/eval/EvaluatingCompilerSelectStarTests.kt @@ -26,7 +26,7 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { class AddressedExprValue( private val innerExprValue: ExprValue, override val name: ExprValue, - override val address: ExprValue + override val address: ExprValue, ) : ExprValue by innerExprValue, Named, Addressed { // Need to override the asFacet provided by [innerExprValue] since it won't implement either facet. @@ -38,7 +38,7 @@ class EvaluatingCompilerSelectStarTests : EvaluatorTestBase() { AddressedExprValue( ExprValue.of(ion.singleValue(ionText)), ExprValue.newInt(index), - ExprValue.newString(address) + ExprValue.newString(address), ) @Test diff --git a/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt b/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt index dc59a9e84..911396c68 100644 --- a/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt +++ b/partiql-lang/src/test/kotlin/org/partiql/lang/planner/transforms/PartiQLSchemaInferencerTests.kt @@ -934,7 +934,7 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "EXCLUDE SELECT star list tuple wildcard", + name = "EXCLUDE SELECT star tuple wildcard as last step", key = key("exclude-07"), expected = BagType( StructType( @@ -995,7 +995,7 @@ class PartiQLSchemaInferencerTests { ) ), SuccessTestCase( - name = "EXCLUDE SELECT star with JOINs", + name = "EXCLUDE SELECT star with JOIN", key = key("exclude-09"), expected = BagType( StructType( diff --git a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql index 3c862dd79..a6742e73c 100644 --- a/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql +++ b/partiql-planner/src/testFixtures/resources/inputs/schema_inferencer/exclude.sql @@ -234,9 +234,9 @@ SELECT * EXCLUDE t.a[*].b.d[*].e FROM << { 'a': [ - { 'b': { 'c': 0, 'd': [{'e': 'zero', 'f': true}] } }, - { 'b': { 'c': 1, 'd': [{'e': 'one', 'f': true}] } }, - { 'b': { 'c': 2, 'd': [{'e': 'two', 'f': true}] } } + { 'b': { 'c': 0, 'd': [ {'e': 'zero', 'f': true}] } }, + { 'b': { 'c': 1, 'd': [ {'e': 'one', 'f': true}] } }, + { 'b': { 'c': 2, 'd': [ {'e': 'two', 'f': true}] } } ] } >> AS t;
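For readers who want to exercise the new behavior, below is a minimal, hypothetical usage sketch (not part of the patch above). It drives the experimental EXCLUDE evaluation end to end through the library's existing CompilerPipeline; the no-argument CompilerPipeline.standard() factory is an assumption and may differ by release, while EvaluationSession.standard() matches what the new tests use. The query and expected output mirror the "EXCLUDE deeper nested field; other field remaining" test case added in this diff.

import org.partiql.lang.CompilerPipeline
import org.partiql.lang.eval.EvaluationSession

fun main() {
    // Assumed pipeline construction; the exact factory signature may differ by release.
    val pipeline = CompilerPipeline.standard()
    // EXCLUDE t.a.b removes the nested 'b' attribute from each binding tuple before projection.
    val expression = pipeline.compile(
        "SELECT t.* EXCLUDE t.a.b FROM <<{'a': {'b': 2, 'c': 3}, 'foo': 'bar'}>> AS t"
    )
    val result = expression.eval(EvaluationSession.standard())
    println(result) // expected: <<{'a': {'c': 3}, 'foo': 'bar'}>>
}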