From a4ead63d33cb21ce30e8ee16eed0b7313e3b22bf Mon Sep 17 00:00:00 2001
From: ABing <101158374+ABingHuang@users.noreply.github.com>
Date: Thu, 24 Aug 2023 18:26:39 +0800
Subject: [PATCH] [BugFix] limit mv with window function (#29325)

* limit mv with window function

Signed-off-by: ABingHuang
(cherry picked from commit c43bdfe12365fcbaa5e523983453a1c829cbd051)

# Conflicts:
#	fe/fe-core/src/main/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzer.java
---
 .../analyzer/MaterializedViewAnalyzer.java | 24 ++++++++++++
 .../MaterializedViewAnalyzerTest.java      | 40 ++++++++++++++++++++
 2 files changed, 64 insertions(+)

diff --git a/fe/fe-core/src/main/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzer.java b/fe/fe-core/src/main/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzer.java
index 29ad22f3c37c9..377e4fb99b147 100644
--- a/fe/fe-core/src/main/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzer.java
+++ b/fe/fe-core/src/main/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzer.java
@@ -19,6 +19,7 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import com.starrocks.analysis.AnalyticExpr;
 import com.starrocks.analysis.Expr;
 import com.starrocks.analysis.FunctionCallExpr;
 import com.starrocks.analysis.IntLiteral;
@@ -272,6 +273,7 @@ public Void visitCreateMaterializedViewStatement(CreateMaterializedViewStatement
             checkPartitionExpPatterns(statement);
             // check partition column must be base table's partition column
             checkPartitionColumnWithBaseTable(statement, aliasTableMap);
+            checkWindowFunctions(statement, columnExprMap);
         }
         // check and analyze distribution
         checkDistribution(statement, aliasTableMap);
@@ -771,6 +773,28 @@ private void checkPartitionColumnWithBaseHMSTable(SlotRef slotRef, HiveMetaStore
         }
     }
 
+    // If the mv is partitioned, it is refreshed partition by partition.
+    // If the mv's query contains window functions, they should also be partitioned, and their
+    // partition-by expressions should contain the partition column of the mv.
+    private void checkWindowFunctions(
+            CreateMaterializedViewStatement statement,
+            Map<Column, Expr> columnExprMap) {
+        SlotRef partitionSlotRef = getSlotRef(statement.getPartitionRefTableExpr());
+        // analyze the partition expr first to get its type info
+        PartitionExprAnalyzer.analyzePartitionExpr(statement.getPartitionRefTableExpr(), partitionSlotRef);
+        for (Expr columnExpr : columnExprMap.values()) {
+            if (columnExpr instanceof AnalyticExpr) {
+                AnalyticExpr analyticExpr = columnExpr.cast();
+                if (analyticExpr.getPartitionExprs() == null
+                        || !analyticExpr.getPartitionExprs().contains(statement.getPartitionRefTableExpr())) {
+                    throw new SemanticException("window function %s's partition expressions" +
+                            " should contain the partition column %s of materialized view",
+                            analyticExpr.getFnCall().getFnName().getFunction(), statement.getPartitionColumn().getName());
+                }
+            }
+        }
+    }
+
     private void checkPartitionColumnWithBaseIcebergTable(SlotRef slotRef, IcebergTable table) {
         org.apache.iceberg.Table icebergTable = table.getNativeTable();
         PartitionSpec partitionSpec = icebergTable.spec();
diff --git a/fe/fe-core/src/test/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzerTest.java b/fe/fe-core/src/test/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzerTest.java
index 2b17b20e3bf27..836e3c9840cfd 100644
--- a/fe/fe-core/src/test/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzerTest.java
+++ b/fe/fe-core/src/test/java/com/starrocks/sql/analyzer/MaterializedViewAnalyzerTest.java
@@ -125,4 +125,44 @@ public void testCreateMvWithNotExistResourceGroup() {
         Assert.assertThrows("resource_group not_exist_rg does not exist.", DdlException.class,
                 () -> starRocksAssert.useDatabase("test").withMaterializedView(sql));
     }
+
+    @Test
+    public void testCreateMvWithWindowFunction() throws Exception {
+        {
+            String mvSql = "create materialized view window_mv_1\n" +
+                    "partition by date_trunc('month', k1)\n" +
+                    "distributed by hash(k2)\n" +
+                    "refresh manual\n" +
+                    "as\n" +
+                    "select \n" +
+                    "\tk2, k1, row_number() over (partition by date_trunc('month', k1) order by k2)\n" +
+                    "from tbl1 \n";
+            starRocksAssert.useDatabase("test").withMaterializedView(mvSql);
+        }
+
+        {
+            String mvSql = "create materialized view window_mv_2\n" +
+                    "partition by k1\n" +
+                    "distributed by hash(k2)\n" +
+                    "refresh manual\n" +
+                    "as\n" +
+                    "select \n" +
+                    "\tk2, k1, row_number() over (partition by k1 order by k2)\n" +
+                    "from tbl1 \n";
+            starRocksAssert.useDatabase("test").withMaterializedView(mvSql);
+        }
+
+        {
+            String mvSql = "create materialized view window_mv_3\n" +
+                    "partition by k1\n" +
+                    "distributed by hash(k2)\n" +
+                    "refresh manual\n" +
+                    "as\n" +
+                    "select \n" +
+                    "\tk2, k1, row_number() over (order by k2)\n" +
+                    "from tbl1 \n";
+            analyzeFail(mvSql, "Detail message: window function row_number's partition expressions" +
+                    " should contain the partition column k1 of materialized view");
+        }
+    }
 }
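
The rule enforced by checkWindowFunctions can also be illustrated directly in DDL. The sketch below is illustrative only and not part of the diff above; it assumes a hypothetical base table t that is partitioned by a date column dt and also has columns id and v (all names invented for the example). The first statement would be rejected by the new check; the second shows the accepted form, where the mv's partition column dt is included in the window's PARTITION BY.

    -- Expected to fail: the window's PARTITION BY does not contain dt,
    -- the partition column of the materialized view.
    create materialized view mv_window_rejected
    partition by dt
    distributed by hash(id)
    refresh manual
    as
    select id, dt, row_number() over (partition by id order by v) as rn
    from t;

    -- Expected to succeed: dt is in the window's PARTITION BY,
    -- so each mv partition can be refreshed independently.
    create materialized view mv_window_accepted
    partition by dt
    distributed by hash(id)
    refresh manual
    as
    select id, dt, row_number() over (partition by dt, id order by v) as rn
    from t;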