@@ -43,6 +43,7 @@
import static org.apache.phoenix.monitoring.MetricType.UPSERT_SQL_COUNTER;
import static org.apache.phoenix.monitoring.MetricType.UPSERT_SQL_QUERY_TIME;
import static org.apache.phoenix.monitoring.MetricType.UPSERT_SUCCESS_SQL_COUNTER;
import static org.apache.phoenix.query.QueryServices.CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED;

import java.io.File;
import java.io.IOException;
@@ -65,12 +66,15 @@
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
@@ -88,6 +92,7 @@
import org.apache.phoenix.compile.DeleteCompiler;
import org.apache.phoenix.compile.DropSequenceCompiler;
import org.apache.phoenix.compile.ExplainPlan;
import org.apache.phoenix.compile.ExplainPlanAttributes;
import org.apache.phoenix.compile.ExpressionProjector;
import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
import org.apache.phoenix.compile.ListJarsQueryPlan;
@@ -300,6 +305,7 @@ public String toString() {
private int maxRows;
private int fetchSize = -1;
private int queryTimeoutMillis;
private boolean explainPlanLoggingEnabled;
// Caching per Statement
protected final Calendar localCalendar = Calendar.getInstance();
private boolean validateLastDdlTimestamp;
@@ -309,6 +315,9 @@ public PhoenixStatement(PhoenixConnection connection) {
this.queryTimeoutMillis = getDefaultQueryTimeoutMillis();
this.validateLastDdlTimestamp = ValidateLastDDLTimestampUtil
.getValidateLastDdlTimestampEnabled(this.connection);
this.explainPlanLoggingEnabled = connection.getQueryServices().getProps().getBoolean(
CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED,
QueryServicesOptions.DEFAULT_CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED);
}

/**
@@ -407,6 +416,9 @@ private PhoenixResultSet executeQuery(final CompilableStatement stmt,
LOGGER.debug(LogUtil.addCustomAnnotations(
"Explain plan: " + explainPlan, connection));
}
if (explainPlanLoggingEnabled) {
updateExplainPlanInformation(plan);
}
StatementContext context = plan.getContext();
context.setQueryLogger(queryLogger);
if (queryLogger.isDebugEnabled()) {
@@ -2872,4 +2884,40 @@ public Calendar getLocalCalendar() {
return localCalendar;
}

/**
 * Records a summary of the query's explain plan (scan type plus region and host
 * information) on the connection's activity logger.
 */
private void updateExplainPlanInformation(QueryPlan plan) throws SQLException {
    if (plan == null || !getConnection().getActivityLogger()
            .isLevelEnabled(ActivityLogInfo.EXPLAIN_PLAN.getLogLevel())) {
        return;
    }

    ExplainPlan explainPlan = plan.getExplainPlan();
    ExplainPlanAttributes explainPlanAttributes = explainPlan.getPlanStepsAsAttributes();

    List<HRegionLocation> locations = explainPlanAttributes.getRegionLocations();
    String regionInfo = getRegionInfo(locations);

    String explainPlanInfo = Stream.of(
            explainPlanAttributes.getExplainScanType(),
            regionInfo)
            .collect(Collectors.joining(","));
    updateActivityOnConnection(ActivityLogInfo.EXPLAIN_PLAN, explainPlanInfo);
}

/**
 * Formats the scanned region locations as
 * "regions={encodedName,...},hostnames={host,...}", or returns an empty string
 * when no locations are available.
 */
private String getRegionInfo(List<HRegionLocation> locations) {
    if (locations == null || locations.isEmpty()) {
        return "";
    }

    String regions = locations.stream()
            .map(regionLocation -> regionLocation.getRegion().getEncodedName())
            .collect(Collectors.joining(","));

    String hostnames = locations.stream()
            .map(HRegionLocation::getHostname)
            .collect(Collectors.joining(","));

    return QueryUtil.REGIONS + "={" + regions + "},"
            + QueryUtil.HOSTNAMES + "={" + hostnames + "}";
}

}
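For reference, the value recorded under the new EXPLAIN_PLAN activity entry is a single comma-joined string: the scan type reported by ExplainPlanAttributes, followed by the region/hostname summary built in getRegionInfo above. The following is a minimal, standalone sketch of that assembly; the scan-type text, encoded region name, and hostname are hypothetical placeholders, since the real values come from the compiled QueryPlan.

```java
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ExplainPlanSummaryShape {
    public static void main(String[] args) {
        // Hypothetical values standing in for ExplainPlanAttributes.getExplainScanType()
        // and the region/host details gathered from HRegionLocation.
        String scanType = "FULL SCAN OVER MY_TABLE";
        String regionInfo = "regions={a1b2c3d4},hostnames={host1.example.com}";

        // Same joining step as updateExplainPlanInformation(): scan type first,
        // then the region summary, separated by a comma.
        String recorded = Stream.of(scanType, regionInfo)
                .collect(Collectors.joining(","));

        // Prints: FULL SCAN OVER MY_TABLE,regions={a1b2c3d4},hostnames={host1.example.com}
        System.out.println(recorded);
    }
}
```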
@@ -34,7 +34,8 @@ public enum ActivityLogInfo {
REQUEST_ID("r", LogLevel.INFO,PVarchar.INSTANCE),
TABLE_NAME("n", LogLevel.INFO,PVarchar.INSTANCE),
OP_NAME("o", LogLevel.INFO,PVarchar.INSTANCE),
OP_STMTS("#", LogLevel.INFO, PInteger.INSTANCE);
OP_STMTS("#", LogLevel.INFO, PInteger.INSTANCE),
EXPLAIN_PLAN("ep", LogLevel.INFO, PVarchar.INSTANCE);

public final String shortName;
public final LogLevel logLevel;
@@ -147,4 +147,13 @@ public boolean isLevelEnabled(LogLevel logLevel) {
return this.logLevel != null && logLevel != LogLevel.OFF ? logLevel.ordinal() <= this.logLevel.ordinal()
: false;
}

/**
 * Returns the explain plan information captured via connection activity logging,
 * or null when the EXPLAIN_PLAN log level is not enabled.
 */
public String getExplainPlanInfo() {
return isLevelEnabled(ActivityLogInfo.EXPLAIN_PLAN.getLogLevel())
? activityList.get(ActivityLogInfo.EXPLAIN_PLAN.ordinal())
: null;
}
}
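The new accessor lets callers read the recorded summary back off the connection. The sketch below shows that read path; it assumes getActivityLogger() is reachable from client code (as it is in the test further below), that both logging flags are enabled, and the JDBC URL and table name are placeholders.

```java
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.apache.phoenix.jdbc.PhoenixConnection;

public class ReadExplainPlanActivity {
    public static void main(String[] args) throws Exception {
        try (PhoenixConnection pconn = DriverManager
                .getConnection("jdbc:phoenix:localhost")
                .unwrap(PhoenixConnection.class);
             Statement stmt = pconn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT * FROM MY_TABLE")) {
            while (rs.next()) {
                // drain the results so the query actually runs
            }
            // getExplainPlanInfo() returns null when the EXPLAIN_PLAN level is not enabled.
            String planInfo = pconn.getActivityLogger().getExplainPlanInfo();
            if (planInfo != null && !planInfo.isEmpty()) {
                System.out.println("Recorded plan summary: " + planInfo);
            }
        }
    }
}
```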
@@ -306,6 +306,8 @@ public interface QueryServices extends SQLCloseable {
"phoenix.internal.connection.max.allowed.connections";
public static final String CONNECTION_ACTIVITY_LOGGING_ENABLED =
"phoenix.connection.activity.logging.enabled";
public static final String CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED =
        "phoenix.connection.activity.logging.explain.plan.enabled";
public static final String CONNECTION_ACTIVITY_LOGGING_INTERVAL =
"phoenix.connection.activity.logging.interval";
public static final String DEFAULT_COLUMN_ENCODED_BYTES_ATRRIB = "phoenix.default.column.encoded.bytes.attrib";
@@ -30,6 +30,7 @@
import static org.apache.phoenix.query.QueryServices.COLLECT_REQUEST_LEVEL_METRICS;
import static org.apache.phoenix.query.QueryServices.COMMIT_STATS_ASYNC;
import static org.apache.phoenix.query.QueryServices.CONNECTION_ACTIVITY_LOGGING_ENABLED;
import static org.apache.phoenix.query.QueryServices.CONNECTION_ACTIVITY_LOGGING_INTERVAL;
import static org.apache.phoenix.query.QueryServices.CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED;
import static org.apache.phoenix.query.QueryServices.CONNECTION_QUERY_SERVICE_HISTOGRAM_SIZE_RANGES;
import static org.apache.phoenix.query.QueryServices.CONNECTION_QUERY_SERVICE_METRICS_ENABLED;
@@ -371,6 +372,7 @@ public class QueryServicesOptions {
public static final int DEFAULT_INTERNAL_CONNECTION_MAX_ALLOWED_CONNECTIONS = 0;

public static final boolean DEFAULT_CONNECTION_ACTIVITY_LOGGING_ENABLED = false;
public static final boolean DEFAULT_CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED = false;
public static final int DEFAULT_CONNECTION_ACTIVITY_LOGGING_INTERVAL_IN_MINS = 15;
public static final boolean DEFAULT_STATS_COLLECTION_ENABLED = true;
public static final boolean DEFAULT_USE_STATS_FOR_PARALLELIZATION = true;
@@ -571,6 +573,8 @@ public static QueryServicesOptions withDefaults() {
DEFAULT_SERVER_MERGE_FOR_UNCOVERED_INDEX)
.setIfUnset(MAX_IN_LIST_SKIP_SCAN_SIZE, DEFAULT_MAX_IN_LIST_SKIP_SCAN_SIZE)
.setIfUnset(CONNECTION_ACTIVITY_LOGGING_ENABLED, DEFAULT_CONNECTION_ACTIVITY_LOGGING_ENABLED)
.setIfUnset(CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED,
DEFAULT_CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED)
.setIfUnset(CONNECTION_ACTIVITY_LOGGING_INTERVAL,
DEFAULT_CONNECTION_ACTIVITY_LOGGING_INTERVAL_IN_MINS)
.setIfUnset(CLUSTER_ROLE_BASED_MUTATION_BLOCK_ENABLED,
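Both new defaults are off, so the feature is strictly opt-in. One way to enable it from a client, sketched below, is to pass the two properties at connection time; the key names come from QueryServices above, but whether connection-level Properties reach the query-services configuration depends on how the ConnectionQueryServices instance is built, so setting the same keys in the client-side hbase-site.xml (or via Configuration, as the test setup further below does) is the surer route.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class EnableExplainPlanLoggingExample {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Both flags default to false; connection activity logging itself must be on
        // for the EXPLAIN_PLAN entry to be recorded at all.
        props.setProperty("phoenix.connection.activity.logging.enabled", "true");
        props.setProperty("phoenix.connection.activity.logging.explain.plan.enabled", "true");
        // Hypothetical JDBC URL; adjust to the target cluster.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost", props)) {
            // Queries executed on this connection now record an explain plan summary.
        }
    }
}
```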
@@ -136,6 +136,8 @@ public final class QueryUtil {
public static final int DATA_TYPE_NAME_POSITION = 6;

public static final String IS_SERVER_CONNECTION = "IS_SERVER_CONNECTION";
public static final String REGIONS = "regions";
public static final String HOSTNAMES = "hostnames";
private static final String SELECT = "SELECT";
private static final String FROM = "FROM";
private static final String WHERE = "WHERE";
@@ -29,6 +29,7 @@

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.SQLException;
import java.sql.PreparedStatement;
import java.sql.Timestamp;
@@ -199,6 +200,51 @@ public void testActivityLogsOnQueryWhenFailures() throws Exception {

}

@Test
public void testQueryExplainPlan() throws Exception {

    String query = "SELECT * FROM " + tableName; // expected to execute as a FULL SCAN

    if (getConnection() instanceof PhoenixConnection) {
        try (PhoenixConnection pconn = getConnection().unwrap(PhoenixConnection.class);
                Statement stmt = pconn.createStatement()) {
            ResultSet rs = stmt.executeQuery(query);
            while (rs.next()) {
                // drain the result set so the query actually executes
            }
            boolean queryPlanFound = false;
            String queryPlan = pconn.getActivityLogger().getExplainPlanInfo();
            if (queryPlan != null && queryPlan.contains("FULL SCAN") &&
                    queryPlan.contains("regions=") && queryPlan.contains("hostnames=")) {
                queryPlanFound = true;

                // Extract the region and hostname lists from the logged plan summary
                String regions = extractBetweenBraces(queryPlan, "regions={");
                String hostnames = extractBetweenBraces(queryPlan, "hostnames={");

                assertFalse("Regions should not be empty", regions.trim().isEmpty());
                assertFalse("Hostnames should not be empty", hostnames.trim().isEmpty());
            }
            assertTrue("Query plan should contain FULL SCAN, regions, and hostnames", queryPlanFound);
        }
        try (PhoenixConnection pconn = getConnection().unwrap(PhoenixConnection.class)) {
            loadData(pconn, "PhoenixTest", "1", 10, 2);
            String queryPlan = pconn.getActivityLogger().getExplainPlanInfo();
            assertTrue("Query plan should be empty for non-query operations", queryPlan.isEmpty());
        }
    } else {
        // For the HA case, skip the check: ParallelPhoenixConnection does not expose an activity logger.
        assertTrue(getConnection() instanceof ParallelPhoenixConnection);
    }
}

/**
 * Returns the text between the given marker and the next closing brace,
 * e.g. the region list from "regions={...}".
 */
private String extractBetweenBraces(String text, String marker) {
    int start = text.indexOf(marker) + marker.length();
    int end = text.indexOf("}", start);
    return text.substring(start, end);
}

protected abstract ConnectionLimiter getConnectionLimiter() throws Exception ;

protected int runSampleActivity(ActivityType activityType, int clientPool, int clientQueue, int numRows, int connNotClosed) throws Exception {
@@ -65,6 +65,7 @@ public static void doSetup() throws Exception {
conf.set(QueryServices.INTERNAL_CONNECTION_MAX_ALLOWED_CONNECTIONS, String.valueOf(20));
conf.set(PhoenixHAExecutorServiceProvider.HA_MAX_POOL_SIZE, String.valueOf(5));
conf.set(PhoenixHAExecutorServiceProvider.HA_MAX_QUEUE_SIZE, String.valueOf(30));
conf.set(QueryServices.CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED, String.valueOf(true));
conf.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, ZKConnectionInfo.ZK_REGISTRY_NAME);
return conf;
}
@@ -76,6 +77,7 @@ public static void doSetup() throws Exception {
conf.set(QueryServices.INTERNAL_CONNECTION_MAX_ALLOWED_CONNECTIONS, String.valueOf(20));
conf.set(PhoenixHAExecutorServiceProvider.HA_MAX_POOL_SIZE, String.valueOf(5));
conf.set(PhoenixHAExecutorServiceProvider.HA_MAX_QUEUE_SIZE, String.valueOf(30));
conf.set(QueryServices.CONNECTION_EXPLAIN_PLAN_LOGGING_ENABLED, String.valueOf(true));
conf.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY, ZKConnectionInfo.ZK_REGISTRY_NAME);
Configuration copy = new Configuration(conf);
copy.addResource(confToClone);