Michael Blow has submitted this change and it was merged.

Change subject: Cleanup Logging, Report Joined Nodes, Misc Cleanup
......................................................................


Cleanup Logging, Report Joined Nodes, Misc Cleanup

- Minor refactoring of NodeControllerService startup
- Cleanup logging in GlobalRecoveryManager / LifeCycleComponentManager
- Enable TestExecutor to accept non-200 status codes
- Use ExecutorService for GlobalRecovery thread
- Eliminate NPE when metadata node goes down before global recovery
  starts

Change-Id: I87b6b45e1a0cdc7a8b77d80b4e603d927aa60b8a
Reviewed-on: https://asterix-gerrit.ics.uci.edu/1706
Tested-by: Jenkins <jenk...@fulliautomatix.ics.uci.edu>
BAD: Jenkins <jenk...@fulliautomatix.ics.uci.edu>
Reviewed-by: Till Westmann <ti...@apache.org>
Integration-Tests: Jenkins <jenk...@fulliautomatix.ics.uci.edu>
---
M asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
M asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
M asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
M asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
M hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCServiceContext.java
M hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
M hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
M hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
8 files changed, 177 insertions(+), 170 deletions(-)

Approvals:
  Till Westmann: Looks good to me, approved
  Jenkins: Verified; No violations found; Verified

Objections:
  Jenkins: Violations found



diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 9c66f57..bf7d5eb 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -123,7 +123,7 @@
                 .create(ClusterProperties.INSTANCE.getCluster(), repStrategy, ccServiceCtx);
         ExternalLibraryUtils.setUpExternaLibraries(libraryManager, false);
         componentProvider = new StorageComponentProvider();
-        GlobalRecoveryManager.instantiate((HyracksConnection) getHcc(), componentProvider);
+        GlobalRecoveryManager.instantiate(ccServiceCtx, getHcc(), componentProvider);
         appCtx = new CcApplicationContext(ccServiceCtx, getHcc(), libraryManager, resourceIdManager,
                 () -> MetadataManager.INSTANCE, GlobalRecoveryManager.instance(), ftStrategy,
                 new ActiveLifecycleListener());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index 722bb78..1816a25 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -45,7 +45,8 @@
 import org.apache.asterix.metadata.utils.ExternalIndexingOperations;
 import org.apache.asterix.metadata.utils.MetadataConstants;
 import org.apache.asterix.runtime.utils.ClusterStateManager;
-import org.apache.hyracks.api.client.HyracksConnection;
+import org.apache.hyracks.api.application.ICCServiceContext;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
 import org.apache.hyracks.api.job.JobId;
 import org.apache.hyracks.api.job.JobSpecification;
 
@@ -55,10 +56,13 @@
     private static GlobalRecoveryManager instance;
     private static ClusterState state;
     private final IStorageComponentProvider componentProvider;
-    private HyracksConnection hcc;
+    private final ICCServiceContext ccServiceCtx;
+    private IHyracksClientConnection hcc;
 
-    private GlobalRecoveryManager(HyracksConnection hcc, IStorageComponentProvider componentProvider) {
+    private GlobalRecoveryManager(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
+                                  IStorageComponentProvider componentProvider) {
         setState(ClusterState.UNUSABLE);
+        this.ccServiceCtx = ccServiceCtx;
         this.hcc = hcc;
         this.componentProvider = componentProvider;
     }
@@ -97,142 +101,140 @@
         final ClusterState newState = ClusterStateManager.INSTANCE.getState();
         boolean needToRecover = !newState.equals(state) && (newState == ClusterState.ACTIVE);
         if (needToRecover) {
-            Thread recoveryThread = new Thread(new Runnable() {
-                @Override
-                public void run() {
-                    LOGGER.info("Starting AsterixDB's Global Recovery");
-                    MetadataTransactionContext mdTxnCtx = null;
-                    try {
-                        Thread.sleep(4000);
-                        MetadataManager.INSTANCE.init();
-                        // Loop over datasets
-                        mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                        List<Dataverse> dataverses = MetadataManager.INSTANCE.getDataverses(mdTxnCtx);
-                        for (Dataverse dataverse : dataverses) {
-                            if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
-                                MetadataProvider metadataProvider =
-                                        new MetadataProvider(appCtx, dataverse, componentProvider);
-                                try {
-                                    List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
-                                            dataverse.getDataverseName());
-                                    for (Dataset dataset : datasets) {
-                                        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
-                                            // External dataset
-                                            // Get indexes
-                                            List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
-                                                    dataset.getDataverseName(), dataset.getDatasetName());
-                                            // Get the state of the dataset
-                                            ExternalDatasetDetails dsd =
-                                                    (ExternalDatasetDetails) dataset.getDatasetDetails();
-                                            TransactionState datasetState = dsd.getState();
-                                            if (!indexes.isEmpty()) {
-                                                if (datasetState == TransactionState.BEGIN) {
-                                                    List<ExternalFile> files = MetadataManager.INSTANCE
-                                                            .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                    // if persumed abort, roll backward
-                                                    // 1. delete all pending files
-                                                    for (ExternalFile file : files) {
-                                                        if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
-                                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
-                                                        }
-                                                    }
-                                                }
-                                                // 2. clean artifacts in NCs
-                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                JobSpecification jobSpec = ExternalIndexingOperations
-                                                        .buildAbortOp(dataset, indexes, metadataProvider);
-                                                executeHyracksJob(jobSpec);
-                                                // 3. correct the dataset state
-                                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                        .setState(TransactionState.COMMIT);
-                                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                                            } else if (datasetState == TransactionState.READY_TO_COMMIT) {
-                                                List<ExternalFile> files = MetadataManager.INSTANCE
-                                                        .getDatasetExternalFiles(mdTxnCtx, dataset);
-                                                // if ready to commit, roll forward
-                                                // 1. commit indexes in NCs
-                                                metadataProvider.setMetadataTxnContext(mdTxnCtx);
-                                                JobSpecification jobSpec = ExternalIndexingOperations
-                                                        .buildRecoverOp(dataset, indexes, metadataProvider);
-                                                executeHyracksJob(jobSpec);
-                                                // 2. add pending files in metadata
-                                                for (ExternalFile file : files) {
-                                                    if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
-                                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
-                                                        file.setPendingOp(ExternalFilePendingOp.NO_OP);
-                                                        MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
-                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
-                                                        // find original file
-                                                        for (ExternalFile originalFile : files) {
-                                                            if (originalFile.getFileName().equals(file.getFileName())) {
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        file);
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                                break;
-                                                            }
-                                                        }
-                                                    } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
-                                                        // find original file
-                                                        for (ExternalFile originalFile : files) {
-                                                            if (originalFile.getFileName().equals(file.getFileName())) {
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        file);
-                                                                MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                                originalFile.setSize(file.getSize());
-                                                                MetadataManager.INSTANCE.addExternalFile(mdTxnCtx,
-                                                                        originalFile);
-                                                            }
-                                                        }
-                                                    }
-                                                    // 3. correct the dataset state
-                                                    ((ExternalDatasetDetails) dataset.getDatasetDetails())
-                                                            .setState(TransactionState.COMMIT);
-                                                    MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
-                                                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                                                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-                                                }
-                                            }
-                                        }
-                                    }
-                                } finally {
-                                    metadataProvider.getLocks().unlock();
-                                }
-                            }
-                        }
-                        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-                    } catch (Exception e) {
-                        // This needs to be fixed <-- Needs to shutdown the system -->
-                        /*
-                         * Note: Throwing this illegal state exception will terminate this thread
-                         * and feeds listeners will not be notified.
-                         */
-                        LOGGER.log(Level.SEVERE, "Global recovery was not completed successfully: ", e);
+            setState(newState);
+            ccServiceCtx.getControllerService().getExecutor().submit(() -> {
+                LOGGER.info("Starting Global Recovery");
+                MetadataTransactionContext mdTxnCtx = null;
+                try {
+                    MetadataManager.INSTANCE.init();
+                    // Loop over datasets
+                    mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                    for (Dataverse dataverse : MetadataManager.INSTANCE.getDataverses(mdTxnCtx)) {
+                        mdTxnCtx = recoverDataset(appCtx, mdTxnCtx, dataverse);
+                    }
+                    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                } catch (Exception e) {
+                    // This needs to be fixed <-- Needs to shutdown the system -->
+                    /*
+                     * Note: Throwing this illegal state exception will terminate this thread
+                     * and feeds listeners will not be notified.
+                     */
+                    LOGGER.log(Level.SEVERE, "Global recovery was not completed successfully: ", e);
+                    if (mdTxnCtx != null) {
                         try {
                             MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
                         } catch (Exception e1) {
                             LOGGER.log(Level.SEVERE, "Exception in aborting", e1);
+                            e1.addSuppressed(e);
                             throw new IllegalStateException(e1);
                         }
                     }
-                    ClusterStateManager.INSTANCE.setGlobalRecoveryCompleted(true);
-                    LOGGER.info("Global Recovery Completed");
                 }
-            }, "RecoveryThread");
-            setState(newState);
-            recoveryThread.start();
+                ClusterStateManager.INSTANCE.setGlobalRecoveryCompleted(true);
+                LOGGER.info("Global Recovery Completed");
+            });
         }
+    }
+
+    private MetadataTransactionContext recoverDataset(ICcApplicationContext appCtx, MetadataTransactionContext mdTxnCtx,
+                                                      Dataverse dataverse)
+            throws Exception {
+        if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
+            MetadataProvider metadataProvider = new MetadataProvider(appCtx, dataverse, componentProvider);
+            try {
+                List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
+                        dataverse.getDataverseName());
+                for (Dataset dataset : datasets) {
+                    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
+                        // External dataset
+                        // Get indexes
+                        List<Index> indexes = MetadataManager.INSTANCE.getDatasetIndexes(mdTxnCtx,
+                                dataset.getDataverseName(), dataset.getDatasetName());
+                        // Get the state of the dataset
+                        ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
+                        TransactionState datasetState = dsd.getState();
+                        if (!indexes.isEmpty()) {
+                            if (datasetState == TransactionState.BEGIN) {
+                                List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx,
+                                        dataset);
+                                // if persumed abort, roll backward
+                                // 1. delete all pending files
+                                for (ExternalFile file : files) {
+                                    if (file.getPendingOp() != ExternalFilePendingOp.NO_OP) {
+                                        MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                    }
+                                }
+                            }
+                            // 2. clean artifacts in NCs
+                            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                            JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(dataset, indexes,
+                                    metadataProvider);
+                            executeHyracksJob(jobSpec);
+                            // 3. correct the dataset state
+                            ((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
+                            MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                        } else if (datasetState == TransactionState.READY_TO_COMMIT) {
+                            List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx,
+                                    dataset);
+                            // if ready to commit, roll forward
+                            // 1. commit indexes in NCs
+                            metadataProvider.setMetadataTxnContext(mdTxnCtx);
+                            JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(dataset, indexes,
+                                    metadataProvider);
+                            executeHyracksJob(jobSpec);
+                            // 2. add pending files in metadata
+                            for (ExternalFile file : files) {
+                                if (file.getPendingOp() == ExternalFilePendingOp.ADD_OP) {
+                                    MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                    file.setPendingOp(ExternalFilePendingOp.NO_OP);
+                                    MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, file);
+                                } else if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) {
+                                    // find original file
+                                    for (ExternalFile originalFile : files) {
+                                        if (originalFile.getFileName().equals(file.getFileName())) {
+                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
+                                            break;
+                                        }
+                                    }
+                                } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) {
+                                    // find original file
+                                    for (ExternalFile originalFile : files) {
+                                        if (originalFile.getFileName().equals(file.getFileName())) {
+                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, file);
+                                            MetadataManager.INSTANCE.dropExternalFile(mdTxnCtx, originalFile);
+                                            originalFile.setSize(file.getSize());
+                                            MetadataManager.INSTANCE.addExternalFile(mdTxnCtx, originalFile);
+                                        }
+                                    }
+                                }
+                                // 3. correct the dataset state
+                                ((ExternalDatasetDetails) dataset.getDatasetDetails())
+                                        .setState(TransactionState.COMMIT);
+                                MetadataManager.INSTANCE.updateDataset(mdTxnCtx, dataset);
+                                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                                mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+                            }
+                        }
+                    }
+                }
+            } finally {
+                metadataProvider.getLocks().unlock();
+            }
+        }
+
+        return mdTxnCtx;
     }
 
     public static GlobalRecoveryManager instance() {
         return instance;
     }
 
-    public static synchronized void instantiate(HyracksConnection hcc, IStorageComponentProvider componentProvider) {
-        instance = new GlobalRecoveryManager(hcc, componentProvider);
+    public static synchronized void instantiate(ICCServiceContext ccServiceCtx, IHyracksClientConnection hcc,
+                                                IStorageComponentProvider componentProvider) {
+        instance = new GlobalRecoveryManager(ccServiceCtx, hcc, componentProvider);
     }
 
     public static synchronized void setState(ClusterState state) {
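
The hunks above replace the hand-rolled "RecoveryThread" with a task submitted to the cluster controller's executor and guard the abort path with a null check on mdTxnCtx, which is the NPE fix called out in the commit message. The following is a minimal, self-contained sketch of that pattern only; it uses a plain ExecutorService and placeholder beginTransaction/abortTransaction/runRecovery hooks rather than the real MetadataManager APIs, so every name in it is illustrative, not code from this change.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Sketch only: Txn, beginTransaction() and abortTransaction() stand in for the
    // real MetadataTransactionContext / MetadataManager calls touched by the patch.
    public class RecoverySketch {
        private static final Logger LOGGER = Logger.getLogger(RecoverySketch.class.getName());

        static final class Txn { }

        static Txn beginTransaction() { return new Txn(); }

        static void abortTransaction(Txn txn) { /* no-op in the sketch */ }

        static void runRecovery(Txn txn) { /* recovery work would go here */ }

        public static void main(String[] args) {
            // In the patch this executor comes from ccServiceCtx.getControllerService().getExecutor();
            // a local single-thread pool keeps the sketch self-contained.
            ExecutorService executor = Executors.newSingleThreadExecutor();
            executor.submit(() -> {
                Txn txn = null;
                try {
                    txn = beginTransaction();
                    runRecovery(txn);
                } catch (Exception e) {
                    LOGGER.log(Level.SEVERE, "Recovery was not completed successfully", e);
                    if (txn != null) { // guarding against a null transaction is the NPE fix
                        abortTransaction(txn);
                    }
                }
            });
            executor.shutdown();
        }
    }
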
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index 8457681..e88f647 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -45,6 +45,7 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
@@ -404,7 +405,12 @@
     }
 
     protected HttpResponse executeAndCheckHttpRequest(HttpUriRequest method) throws Exception {
-        return checkResponse(executeHttpRequest(method));
+        return checkResponse(executeHttpRequest(method), code -> code == HttpStatus.SC_OK);
+    }
+
+    protected HttpResponse executeAndCheckHttpRequest(HttpUriRequest method, Predicate<Integer> responseCodeValidator)
+            throws Exception {
+        return checkResponse(executeHttpRequest(method), responseCodeValidator);
     }
 
     protected HttpResponse executeHttpRequest(HttpUriRequest method) throws Exception {
@@ -418,8 +424,9 @@
         }
     }
 
-    protected HttpResponse checkResponse(HttpResponse httpResponse) throws Exception {
-        if (httpResponse.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
+    protected HttpResponse checkResponse(HttpResponse httpResponse, Predicate<Integer> responseCodeValidator)
+            throws Exception {
+        if (!responseCodeValidator.test(httpResponse.getStatusLine().getStatusCode())) {
             String errorBody = EntityUtils.toString(httpResponse.getEntity());
             String exceptionMsg;
             try {
@@ -582,8 +589,13 @@
     }
 
     public InputStream executeJSONPost(OutputFormat fmt, URI uri) throws Exception {
+        return executeJSONPost(fmt, uri, code -> code == HttpStatus.SC_OK);
+    }
+
+    public InputStream executeJSONPost(OutputFormat fmt, URI uri, Predicate<Integer> responseCodeValidator)
+            throws Exception {
+        HttpUriRequest request = constructPostMethod(uri, fmt, new ArrayList<>());
-        HttpResponse response = executeAndCheckHttpRequest(request);
+        HttpResponse response = executeAndCheckHttpRequest(request, responseCodeValidator);
         return response.getEntity().getContent();
     }
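
The new Predicate<Integer> overloads above are what lets a test accept status codes other than 200. Below is a hypothetical usage sketch: executeJSONPost here is a local stand-in that only mimics the validator behaviour of TestExecutor (no real HTTP call), and the URI string and status codes are made up for illustration.

    import java.util.function.Predicate;

    // Sketch of how a caller might use the validator-style overloads added above.
    public class ResponseCheckSketch {

        static int executeJSONPost(String uri, Predicate<Integer> responseCodeValidator) {
            int statusCode = 404;                       // pretend the server answered 404
            if (!responseCodeValidator.test(statusCode)) {
                throw new IllegalStateException("HTTP operation failed: status " + statusCode);
            }
            return statusCode;
        }

        public static void main(String[] args) {
            // Default behaviour: only 200 is acceptable (mirrors code -> code == HttpStatus.SC_OK).
            Predicate<Integer> okOnly = code -> code == 200;
            // Relaxed behaviour enabled by this change: a test may also accept 404.
            Predicate<Integer> okOrNotFound = code -> code == 200 || code == 404;

            System.out.println("relaxed check passed, got " + executeJSONPost("/query/service", okOrNotFound));
            try {
                executeJSONPost("/query/service", okOnly);
            } catch (IllegalStateException e) {
                System.out.println("strict check rejected the response: " + e.getMessage());
            }
        }
    }
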
 
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index 2d8a04d..48937f8 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -26,6 +26,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.SortedMap;
+import java.util.TreeSet;
 import java.util.concurrent.TimeUnit;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -41,6 +42,7 @@
 import org.apache.hyracks.api.config.IOption;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.control.cc.ClusterControllerService;
 import org.apache.hyracks.control.common.controllers.NCConfig;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -303,24 +305,27 @@
         stateDescription.put("metadata_node", currentMetadataNode);
         ArrayNode ncs = om.createArrayNode();
         stateDescription.set("ncs", ncs);
-        for (Map.Entry<String, ClusterPartition[]> entry : node2PartitionsMap.entrySet()) {
+        for (String node : new TreeSet<>(((ClusterControllerService) appCtx.getServiceContext().getControllerService())
+                .getNodeManager().getAllNodeIds())) {
             ObjectNode nodeJSON = om.createObjectNode();
-            nodeJSON.put("node_id", entry.getKey());
+            nodeJSON.put("node_id", node);
             boolean allActive = true;
             boolean anyActive = false;
             Set<Map<String, Object>> partitions = new HashSet<>();
-            for (ClusterPartition part : entry.getValue()) {
-                HashMap<String, Object> partition = new HashMap<>();
-                partition.put("partition_id", "partition_" + part.getPartitionId());
-                partition.put("active", part.isActive());
-                partitions.add(partition);
-                allActive = allActive && part.isActive();
-                if (allActive) {
-                    anyActive = true;
+            if (node2PartitionsMap.containsKey(node)) {
+                for (ClusterPartition part : node2PartitionsMap.get(node)) {
+                    HashMap<String, Object> partition = new HashMap<>();
+                    partition.put("partition_id", "partition_" + part.getPartitionId());
+                    partition.put("active", part.isActive());
+                    partitions.add(partition);
+                    allActive = allActive && part.isActive();
+                    if (allActive) {
+                        anyActive = true;
+                    }
                 }
             }
-            nodeJSON.put("state", failedNodes.contains(entry.getKey()) ? "FAILED"
-                    : allActive ? "ACTIVE" : anyActive ? "PARTIALLY_ACTIVE" : "INACTIVE");
+            nodeJSON.put("state", failedNodes.contains(node) ? "FAILED"
+                    : allActive && anyActive ? "ACTIVE" : anyActive ? "PARTIALLY_ACTIVE" : "INACTIVE");
             nodeJSON.putPOJO("partitions", partitions);
             ncs.add(nodeJSON);
         }
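
The reporting change above lists every node known to the cluster controller (including nodes that have joined but hold no partitions yet) and derives the per-node state from its partitions; the added allActive && anyActive test keeps a node with zero partitions from being reported as ACTIVE. The stand-alone sketch below copies the loop as written in the patch, with a plain boolean list standing in for ClusterPartition#isActive().

    import java.util.List;

    // Stand-alone sketch of the per-node state derivation used above.
    public class NodeStateSketch {

        static String nodeState(boolean failed, List<Boolean> partitionsActive) {
            if (failed) {
                return "FAILED";
            }
            boolean allActive = true;
            boolean anyActive = false;
            for (boolean active : partitionsActive) {
                allActive = allActive && active;
                if (allActive) {
                    anyActive = true;
                }
            }
            // allActive && anyActive: a node with no partitions is not reported ACTIVE.
            return allActive && anyActive ? "ACTIVE" : anyActive ? "PARTIALLY_ACTIVE" : "INACTIVE";
        }

        public static void main(String[] args) {
            System.out.println(nodeState(false, List.of(true, true)));   // ACTIVE
            System.out.println(nodeState(false, List.of(true, false)));  // PARTIALLY_ACTIVE
            System.out.println(nodeState(false, List.of()));             // INACTIVE (joined, no partitions yet)
            System.out.println(nodeState(true, List.of(true)));          // FAILED
        }
    }
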
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCServiceContext.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCServiceContext.java
index c0e9834..94ebcfe 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCServiceContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/application/ICCServiceContext.java
@@ -22,6 +22,7 @@
 
 import org.apache.hyracks.api.context.ICCContext;
 import org.apache.hyracks.api.job.IJobLifecycleListener;
+import org.apache.hyracks.api.service.IControllerService;
 
 /**
  * Service Context at the Cluster Controller for an application.
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
index d6961b3..4674f9a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/lifecycle/LifeCycleComponentManager.java
@@ -43,7 +43,7 @@
     private boolean configured;
 
     public LifeCycleComponentManager() {
-        components = new ArrayList<ILifeCycleComponent>();
+        components = new ArrayList<>();
         stopInitiated = false;
         configured = false;
         stopped = false;
@@ -51,16 +51,11 @@
 
     @Override
     public void uncaughtException(Thread t, Throwable e) {
-        if (LOGGER.isLoggable(Level.SEVERE)) {
-            LOGGER.severe("Uncaught Exception from thread " + t.getName() + " message: " + e.getMessage());
-            e.printStackTrace();
-        }
+        LOGGER.log(Level.SEVERE, "Uncaught Exception from thread " + t.getName(), e);
         try {
             stopAll(true);
         } catch (IOException e1) {
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe("Exception in stopping Asterix. " + e1.getMessage());
-            }
+            LOGGER.log(Level.SEVERE, "Exception in stopping instance", e1);
         }
     }
 
@@ -79,31 +74,25 @@
     @Override
     public synchronized void stopAll(boolean dumpState) throws IOException {
         if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.severe("Attempting to stop " + this);
+            LOGGER.info("Attempting to stop " + this);
         }
         if (stopped) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.severe("Lifecycle management was already stopped");
-            }
+            LOGGER.info("Lifecycle management was already stopped");
             return;
         }
         if (stopInitiated) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.severe("Stop already in progress");
-            }
+            LOGGER.info("Stop already in progress");
             return;
         }
         if (!configured) {
             if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.severe("Lifecycle management not configured" + this);
+                LOGGER.severe("Lifecycle management not configured " + this);
             }
             return;
         }
 
         stopInitiated = true;
-        if (LOGGER.isLoggable(Level.SEVERE)) {
-            LOGGER.severe("Stopping Asterix instance");
-        }
+        LOGGER.severe("Stopping instance");
 
         FileOutputStream componentDumpStream = null;
         String componentDumpPath = null;
@@ -120,14 +109,12 @@
                     componentDumpStream = new FileOutputStream(f);
                 }
                 if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Stopping component instance " + component.getClass().getName() + " dump state "
-                            + dumpState + " dump path " + componentDumpPath);
+                    LOGGER.info("Stopping component instance " + component.getClass().getName() + "; dump state: "
+                            + dumpState + ", dump path: " + componentDumpPath);
                 }
                 component.stop(dumpState, componentDumpStream);
             } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Exception in stopping component " + component.getClass().getName() + e.getMessage());
-                }
+                LOGGER.log(Level.SEVERE, "Exception in stopping component " + component.getClass().getName(), e);
             } finally {
                 if (componentDumpStream != null) {
                     componentDumpStream.close();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
index 9d649f6..11df079 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NCDriver.java
@@ -44,12 +44,7 @@
             application.registerConfig(configManager);
             NCConfig ncConfig = new NCConfig(nodeId, configManager);
             final NodeControllerService ncService = new NodeControllerService(ncConfig, application);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Setting uncaught exception handler " + ncService.getLifeCycleComponentManager());
-            }
-            Thread.currentThread().setUncaughtExceptionHandler(ncService.getLifeCycleComponentManager());
             ncService.start();
-            Runtime.getRuntime().addShutdownHook(new NCShutdownHook(ncService));
             while (true) {
                 Thread.sleep(10000);
             }
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index 9242bba..be24dbe 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -256,6 +256,11 @@
     @Override
     public void start() throws Exception {
         LOGGER.log(Level.INFO, "Starting NodeControllerService");
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Setting uncaught exception handler " + getLifeCycleComponentManager());
+        }
+        Thread.currentThread().setUncaughtExceptionHandler(getLifeCycleComponentManager());
+        Runtime.getRuntime().addShutdownHook(new NCShutdownHook(this));
         ipc = new IPCSystem(new InetSocketAddress(ncConfig.getClusterListenAddress(), ncConfig.getClusterListenPort()),
                 new NodeControllerIPCI(this), new CCNCFunctions.SerializerDeserializer());
         ipc.start();

-- 
To view, visit https://asterix-gerrit.ics.uci.edu/1706
To unsubscribe, visit https://asterix-gerrit.ics.uci.edu/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I87b6b45e1a0cdc7a8b77d80b4e603d927aa60b8a
Gerrit-PatchSet: 4
Gerrit-Project: asterixdb
Gerrit-Branch: master
Gerrit-Owner: Michael Blow <mb...@apache.org>
Gerrit-Reviewer: Jenkins <jenk...@fulliautomatix.ics.uci.edu>
Gerrit-Reviewer: Michael Blow <mb...@apache.org>
Gerrit-Reviewer: Steven Jacobs <sjaco...@ucr.edu>
Gerrit-Reviewer: Till Westmann <ti...@apache.org>
