Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iotdb.db.it.query;

import org.apache.iotdb.db.queryengine.execution.operator.sink.IdentitySinkOperator;
import org.apache.iotdb.db.queryengine.execution.operator.source.ExchangeOperator;
import org.apache.iotdb.it.env.EnvFactory;
import org.apache.iotdb.it.framework.IoTDBTestRunner;
import org.apache.iotdb.itbase.category.LocalStandaloneIT;
import org.apache.iotdb.itbase.env.BaseEnv;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

@RunWith(IoTDBTestRunner.class)
@Category({LocalStandaloneIT.class})
public class IoTDBExplainAnalyzePrintIT {

  /** Seed data: one wide device and one single-sensor device, so a multi-device query spawns an exchange. */
  private static final String[] creationSqls =
      new String[] {
        "insert into root.test.device_0(s1, s2, s3, s4, s5, s6, s7, s8, s9, s10) values(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)",
        "insert into root.test.device_1(s11) values(11)",
      };

  @BeforeClass
  public static void setUp() throws Exception {
    EnvFactory.getEnv().initClusterEnvironment();
    prepareData();
  }

  @AfterClass
  public static void tearDown() throws Exception {
    EnvFactory.getEnv().cleanClusterEnvironment();
  }

  /** Inserts the seed rows; any failure aborts the suite with the full cause attached. */
  private static void prepareData() {
    try (Connection connection = EnvFactory.getEnv().getConnection(BaseEnv.TREE_SQL_DIALECT);
        Statement statement = connection.createStatement()) {
      for (String sql : creationSqls) {
        statement.execute(sql);
      }
    } catch (Exception e) {
      // Preserve the original stack trace instead of collapsing it to e.getMessage().
      throw new AssertionError("Failed to prepare test data", e);
    }
  }

  /**
   * Runs {@code sql} and returns whether any row's first column contains {@code token}.
   *
   * @param sql the query to execute (an EXPLAIN ANALYZE statement in these tests)
   * @param token the substring expected somewhere in the printed plan
   * @throws SQLException if the query fails
   */
  private static boolean firstColumnContains(String sql, String token) throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement();
        ResultSet resultSet = statement.executeQuery(sql)) {
      while (resultSet.next()) {
        if (resultSet.getString(1).contains(token)) {
          return true;
        }
      }
      return false;
    }
  }

  @Test
  public void testIdentitySinkOperatorWhenMergedInAnalyze() throws SQLException {
    assertTrue(
        "explain analyze output should contain DownStreamPlanNodeId in IdentitySinkOperator",
        firstColumnContains(
            "explain analyze select * from root.test.device_0",
            IdentitySinkOperator.DOWNSTREAM_PLAN_NODE_ID));
  }

  @Test
  public void testExchangeOperatorWhenMergedInAnalyze() throws SQLException {
    assertTrue(
        "explain analyze output should contain size_in_bytes",
        firstColumnContains(
            "explain analyze select * from root.test.device_0, root.test.device_1",
            ExchangeOperator.SIZE_IN_BYTES));
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ private void verifyExplainMetrics(

try (Statement statement = connection.createStatement()) {
String sql = "explain analyze verbose select * from " + device + " where " + condition;
log.info(sql);
ResultSet resultSet = statement.executeQuery(sql);

StringBuilder stringBuilder = new StringBuilder();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,6 @@ private boolean fillFragmentInstanceStatistics(
} else {
String planNodeId = operatorContext.getPlanNodeId().toString();
operatorStatistics.setCount(1);
operatorStatistics.getSpecifiedInfo().clear();
leadOverloadOperators.put(operatorType, planNodeId);
operatorStatisticsMap.put(planNodeId, operatorStatistics);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,14 +41,27 @@ public interface SpecifiedInfoMerger {
return first;
};

// currently the sink and shuffle operator only have the field of string type,
// and the case that two operators contained in one FI do not exist yet
private static final SpecifiedInfoMerger SINK_SHUFFLE_MERGER =
(first, second) -> {
first.replaceAll((k, v) -> v + " " + second.get(k));
return first;
};

/** Maintain different merge logic for specified info for different operators. */
public static SpecifiedInfoMerger getMerger(String operatorType) {
switch (operatorType) {
case "TreeSortOperator":
case "TreeMergeSortOperator":
case "TableSortOperator":
case "TableMergeSortOperator":
case "FilterAndProjectOperator":
case "ExchangeOperator":
return LONG_MERGER;
case "IdentitySinkOperator":
case "ShuffleHelperOperator":
return SINK_SHUFFLE_MERGER;
default:
return DEFAULT_MERGER;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,6 @@ public static void mergeAllOperatorStatistics(
} else {
TOperatorStatistics operatorStatistics = entry.getValue();
operatorStatistics.setCount(1);
// Can't merge specifiedInfo of String-type, so just clear it
operatorStatistics.getSpecifiedInfo().clear();
// keep the first one in operatorStatisticsMap as the only-one leadOverloadOperator
leadOverloadOperators.put(
operatorStatistics.getOperatorType(), operatorStatistics.getPlanNodeId());
Expand Down