Github user codymarcel commented on a diff in the pull request:
https://github.com/apache/phoenix/pull/89#discussion_r32763915
--- Diff:
phoenix-pherf/src/main/java/org/apache/phoenix/pherf/workload/QueryExecutor.java
---
@@ -18,227 +18,271 @@
package org.apache.phoenix.pherf.workload;
-import java.sql.Connection;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.phoenix.pherf.PherfConstants.RunMode;
-import org.apache.phoenix.pherf.configuration.XMLConfigParser;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.phoenix.pherf.PherfConstants.RunMode;
+import org.apache.phoenix.pherf.configuration.*;
import org.apache.phoenix.pherf.result.*;
+import org.apache.phoenix.pherf.util.PhoenixUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.phoenix.pherf.configuration.DataModel;
-import org.apache.phoenix.pherf.configuration.ExecutionType;
-import org.apache.phoenix.pherf.configuration.Query;
-import org.apache.phoenix.pherf.configuration.QuerySet;
-import org.apache.phoenix.pherf.configuration.Scenario;
-import org.apache.phoenix.pherf.util.PhoenixUtil;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+
+public class QueryExecutor implements Workload {
+ private static final Logger logger =
LoggerFactory.getLogger(QueryExecutor.class);
+ private List<DataModel> dataModels;
+ private String queryHint;
+ private final RunMode runMode;
+ private final boolean exportCSV;
+ private final ExecutorService pool;
+ private final XMLConfigParser parser;
+ private final PhoenixUtil util;
+
+ public QueryExecutor(XMLConfigParser parser, PhoenixUtil util,
ExecutorService pool) {
+ this(parser, util, pool, parser.getDataModels(), null, false,
RunMode.PERFORMANCE);
+ }
+
+ public QueryExecutor(XMLConfigParser parser,
+ PhoenixUtil util,
+ ExecutorService pool,
+ List<DataModel> dataModels,
+ String queryHint,
+ boolean exportCSV, RunMode runMode) {
+ this.parser = parser;
+ this.queryHint = queryHint;
+ this.exportCSV = exportCSV;
+ this.runMode = runMode;
+ this.dataModels = dataModels;
+ this.pool = pool;
+ this.util = util;
+ }
+
+ @Override
+ public void complete() {
+
+ }
+
+ /**
+ * Calls in Multithreaded Query Executor for all datamodels
+ *
+ * @throws Exception
+ */
+ public Runnable execute() throws Exception {
+ Runnable runnable = null;
+ for (DataModel dataModel : dataModels) {
+ if (exportCSV) {
+ runnable = exportAllScenarios(dataModel);
+ } else {
+ runnable = executeAllScenarios(dataModel);
+ }
+ }
+ return runnable;
+ }
+
+ /**
+ * Export all queries results to CSV
+ *
+ * @param dataModel
+ * @throws Exception
+ */
+ protected Runnable exportAllScenarios(final DataModel dataModel)
throws Exception {
+ return new Runnable() {
+ @Override
+ public void run() {
+ try {
+
+ List<Scenario> scenarios = dataModel.getScenarios();
+ QueryVerifier exportRunner = new QueryVerifier(false);
+ for (Scenario scenario : scenarios) {
+ for (QuerySet querySet : scenario.getQuerySet()) {
+ util.executeQuerySetDdls(querySet);
+ for (Query query : querySet.getQuery()) {
+ exportRunner.exportCSV(query);
+ }
+ }
+ }
+ } catch (Exception e) {
+ logger.warn("", e);
+ }
+ }
+ };
+ }
+
+ /**
+ * Execute all scenarios
+ *
+ * @param dataModel
+ * @throws Exception
+ */
+ protected Runnable executeAllScenarios(final DataModel dataModel)
throws Exception {
+ return new Runnable() {
+ @Override
+ public void run() {
+ List<DataModelResult> dataModelResults = new ArrayList<>();
+ DataModelResult
+ dataModelResult =
+ new DataModelResult(dataModel,
PhoenixUtil.getZookeeper());
+ ResultManager
+ resultManager =
+ new ResultManager(dataModelResult.getName(),
QueryExecutor.this.runMode);
+
+ dataModelResults.add(dataModelResult);
+ List<Scenario> scenarios = dataModel.getScenarios();
+ Configuration conf = HBaseConfiguration.create();
+ Map<String, String> phoenixProperty =
conf.getValByRegex("phoenix");
+ try {
+
+ for (Scenario scenario : scenarios) {
+ ScenarioResult scenarioResult = new
ScenarioResult(scenario);
+
scenarioResult.setPhoenixProperties(phoenixProperty);
+
dataModelResult.getScenarioResult().add(scenarioResult);
+ WriteParams writeParams =
scenario.getWriteParams();
+
+ if (writeParams != null) {
--- End diff --
This is where writers are started during read scenarios — this is what ties
it all together.
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and you wish to enable it, or if the feature is enabled but not
working, please contact infrastructure at [email protected] or file a
JIRA ticket with INFRA.
---