Repository: incubator-rya
Updated Branches:
  refs/heads/master 4576f556a -> 7b571d43a


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
----------------------------------------------------------------------
diff --git 
a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
 
b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
index 23fda51..86359f0 100644
--- 
a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
+++ 
b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/InferenceEngine.java
@@ -35,6 +35,10 @@ import java.util.Timer;
 import java.util.TimerTask;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.log4j.Logger;
@@ -76,35 +80,38 @@ public class InferenceEngine {
     private static final ValueFactory VF = ValueFactoryImpl.getInstance();
     private static final URI HAS_SELF = VF.createURI(OWL.NAMESPACE, "hasSelf");
     private static final URI REFLEXIVE_PROPERTY = VF.createURI(OWL.NAMESPACE, 
"ReflexiveProperty");
+    public static final String URI_PROP = "uri";
 
-    private Graph subClassOfGraph;
-    private Graph subPropertyOfGraph;
-    private Set<URI> symmetricPropertySet;
-    private Map<URI, URI> inverseOfMap;
-    private Set<URI> transitivePropertySet;
-    private Set<URI> reflexivePropertySet;
-    private Map<URI, Set<URI>> domainByType;
-    private Map<URI, Set<URI>> rangeByType;
-    private Map<Resource, Map<URI, Value>> hasValueByType;
-    private Map<URI, Map<Resource, Value>> hasValueByProperty;
-    private Map<Resource, Map<Resource, URI>> someValuesFromByRestrictionType;
-    private Map<Resource, Map<Resource, URI>> allValuesFromByValueType;
-    private final ConcurrentHashMap<Resource, List<Set<Resource>>> 
intersections = new ConcurrentHashMap<>();
-    private final ConcurrentHashMap<Resource, Set<Resource>> enumerations = 
new ConcurrentHashMap<>();
+    private final ReentrantLock refreshLock = new ReentrantLock();
+
+    private final AtomicReference<Graph> subClassOfGraph = new 
AtomicReference<>();
+    private final AtomicReference<Graph> subPropertyOfGraph = new 
AtomicReference<>();
+
+    private final Set<URI> symmetricPropertySet = 
ConcurrentHashMap.newKeySet();
+    private final Map<URI, URI> inverseOfMap = new ConcurrentHashMap<>();
+    private final Set<URI> transitivePropertySet = 
ConcurrentHashMap.newKeySet();
+    private final Set<URI> reflexivePropertySet = 
ConcurrentHashMap.newKeySet();
+    private final Map<URI, Set<URI>> domainByType = new ConcurrentHashMap<>();
+    private final Map<URI, Set<URI>> rangeByType = new ConcurrentHashMap<>();
+    private final Map<Resource, Map<URI, Value>> hasValueByType = new 
ConcurrentHashMap<>();
+    private final Map<URI, Map<Resource, Value>> hasValueByProperty = new 
ConcurrentHashMap<>();
+    private final Map<Resource, Map<Resource, URI>> 
someValuesFromByRestrictionType = new ConcurrentHashMap<>();
+    private final Map<Resource, Map<Resource, URI>> allValuesFromByValueType = 
new ConcurrentHashMap<>();
+    private final Map<Resource, List<Set<Resource>>> intersections = new 
ConcurrentHashMap<>();
+    private final Map<Resource, Set<Resource>> enumerations = new 
ConcurrentHashMap<>();
+    private final Map<URI, List<URI>> propertyChainPropertyToChain = new 
ConcurrentHashMap<>();
     // hasSelf maps.
-    private Map<URI, Set<Resource>> hasSelfByProperty;
-    private Map<Resource, Set<URI>> hasSelfByType;
+    private final Map<URI, Set<Resource>> hasSelfByProperty = new 
ConcurrentHashMap<>();
+    private final Map<Resource, Set<URI>> hasSelfByType = new 
ConcurrentHashMap<>();
 
     private RyaDAO<?> ryaDAO;
     private RdfCloudTripleStoreConfiguration conf;
     private RyaDaoQueryWrapper ryaDaoQueryWrapper;
-    private boolean initialized = false;
-    private boolean schedule = true;
+    private final AtomicBoolean isInitialized = new AtomicBoolean();
+    private final AtomicBoolean schedule = new AtomicBoolean(true);
 
-    private long refreshGraphSchedule = 5 * 60 * 1000; //5 min
+    private final AtomicLong refreshGraphSchedule = new AtomicLong(5 * 60 * 
1000); //5 min
     private Timer timer;
-    private HashMap<URI, List<URI>> propertyChainPropertyToChain = new 
HashMap<>();
-    public static final String URI_PROP = "uri";
 
     public void init() throws InferenceEngineException {
         try {
@@ -117,11 +124,11 @@ public class InferenceEngine {
             checkArgument(ryaDAO.isInitialized(), "RdfDao is not initialized");
             ryaDaoQueryWrapper = new RyaDaoQueryWrapper(ryaDAO, conf);
 
-            if (schedule) {
-                refreshGraph();
+            refreshGraph();
+
+            if (schedule.get()) {
                 timer = new Timer(InferenceEngine.class.getName());
                 timer.scheduleAtFixedRate(new TimerTask() {
-
                     @Override
                     public void run() {
                         try {
@@ -130,10 +137,9 @@ public class InferenceEngine {
                             throw new RuntimeException(e);
                         }
                     }
-
-                }, refreshGraphSchedule, refreshGraphSchedule);
+                }, refreshGraphSchedule.get(), refreshGraphSchedule.get());
             }
-            refreshGraph();
+
             setInitialized(true);
         } catch (final RyaDAOException e) {
             throw new InferenceEngineException(e);
@@ -148,237 +154,51 @@ public class InferenceEngine {
     }
 
     public void refreshGraph() throws InferenceEngineException {
+        refreshLock.lock();
         try {
-            CloseableIteration<Statement, QueryEvaluationException> iter;
             //get all subclassof
             Graph graph = TinkerGraph.open();
             addPredicateEdges(RDFS.SUBCLASSOF, Direction.OUT, graph, 
RDFS.SUBCLASSOF.stringValue());
             //equivalentClass is the same as subClassOf both ways
             addPredicateEdges(OWL.EQUIVALENTCLASS, Direction.BOTH, graph, 
RDFS.SUBCLASSOF.stringValue());
-            // Add unions to the subclass graph: if c owl:unionOf LIST(c1, c2, 
... cn), then any
-            // instances of c1, c2, ... or cn are also instances of c, meaning 
c is a superclass
-            // of all the rest.
-            // (In principle, an instance of c is likewise implied to be at 
least one of the other
-            // types, but this fact is ignored for now to avoid 
nondeterministic reasoning.)
-            iter = RyaDAOHelper.query(ryaDAO, null, OWL.UNIONOF, null, conf);
-            try {
-                while (iter.hasNext()) {
-                    final Statement st = iter.next();
-                    final Value unionType = st.getSubject();
-                    // Traverse the list of types constituting the union
-                    Value current = st.getObject();
-                    while (current instanceof Resource && 
!RDF.NIL.equals(current)) {
-                        final Resource listNode = (Resource) current;
-                        CloseableIteration<Statement, 
QueryEvaluationException> listIter = RyaDAOHelper.query(ryaDAO,
-                                listNode, RDF.FIRST, null, conf);
-                        try {
-                            if (listIter.hasNext()) {
-                                final Statement firstStatement = 
listIter.next();
-                                if (firstStatement.getObject() instanceof 
Resource) {
-                                    final Resource subclass = (Resource) 
firstStatement.getObject();
-                                    final Statement subclassStatement = 
VF.createStatement(subclass, RDFS.SUBCLASSOF, unionType);
-                                    addStatementEdge(graph, 
RDFS.SUBCLASSOF.stringValue(), subclassStatement);
-                                }
-                            }
-                        } finally {
-                            listIter.close();
-                        }
-                        listIter = RyaDAOHelper.query(ryaDAO, listNode, 
RDF.REST, null, conf);
-                        try {
-                            if (listIter.hasNext()) {
-                                current = listIter.next().getObject();
-                            }
-                            else {
-                                current = RDF.NIL;
-                            }
-                        } finally {
-                            listIter.close();
-                        }
-                    }
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
-            }
-            subClassOfGraph = graph; //TODO: Should this be synchronized?
+            // Add unions to the subclass graph
+            addUnions(graph);
+            subClassOfGraph.set(graph);
 
             graph = TinkerGraph.open();
             addPredicateEdges(RDFS.SUBPROPERTYOF, Direction.OUT, graph, 
RDFS.SUBPROPERTYOF.stringValue());
             //equiv property really is the same as a subPropertyOf both ways
             addPredicateEdges(OWL.EQUIVALENTPROPERTY, Direction.BOTH, graph, 
RDFS.SUBPROPERTYOF.stringValue());
-            subPropertyOfGraph = graph; //TODO: Should this be synchronized?
+            subPropertyOfGraph.set(graph);
 
             refreshIntersectionOf();
 
             refreshOneOf();
 
-            symmetricPropertySet = fetchInstances(OWL.SYMMETRICPROPERTY);
-            transitivePropertySet = fetchInstances(OWL.TRANSITIVEPROPERTY);
-            reflexivePropertySet = fetchInstances(REFLEXIVE_PROPERTY);
-
-            iter = RyaDAOHelper.query(ryaDAO, null, OWL.INVERSEOF, null, conf);
-            final Map<URI, URI> invProp = new HashMap<>();
-            try {
-                while (iter.hasNext()) {
-                    final Statement st = iter.next();
-                    invProp.put((URI) st.getSubject(), (URI) st.getObject());
-                    invProp.put((URI) st.getObject(), (URI) st.getSubject());
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
+            synchronized(symmetricPropertySet) {
+                symmetricPropertySet.clear();
+                
symmetricPropertySet.addAll(fetchInstances(OWL.SYMMETRICPROPERTY));
             }
-            inverseOfMap = invProp;
-
-            iter = RyaDAOHelper.query(ryaDAO, null,
-                    
VF.createURI("http://www.w3.org/2002/07/owl#propertyChainAxiom"),
-                    null, conf);
-            final Map<URI,URI> propertyChainPropertiesToBNodes = new 
HashMap<>();
-            propertyChainPropertyToChain = new HashMap<>();
-            try {
-                while (iter.hasNext()){
-                    final Statement st = iter.next();
-                    propertyChainPropertiesToBNodes.put((URI)st.getSubject(), 
(URI)st.getObject());
-                }
-            } finally {
-                if (iter != null) {
-                    iter.close();
-                }
+            synchronized(transitivePropertySet) {
+                transitivePropertySet.clear();
+                
transitivePropertySet.addAll(fetchInstances(OWL.TRANSITIVEPROPERTY));
             }
-            // now for each property chain bNode, get the indexed list of 
properties associated with that chain
-            for (final URI propertyChainProperty : 
propertyChainPropertiesToBNodes.keySet()){
-                final URI bNode = 
propertyChainPropertiesToBNodes.get(propertyChainProperty);
-                // query for the list of indexed properties
-                iter = RyaDAOHelper.query(ryaDAO, bNode, 
VF.createURI("http://www.w3.org/2000/10/swap/list#index"),
-                        null, conf);
-                final TreeMap<Integer, URI> orderedProperties = new 
TreeMap<>();
-                // TODO refactor this.  Wish I could execute sparql
-                try {
-                    while (iter.hasNext()){
-                        final Statement st = iter.next();
-                        final String indexedElement = 
st.getObject().stringValue();
-                        log.info(indexedElement);
-                        CloseableIteration<Statement, 
QueryEvaluationException>  iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(st.getObject().stringValue()), RDF.FIRST,
-                                null, conf);
-                        String integerValue = "";
-                        Value anonPropNode = null;
-                        Value propURI = null;
-                        if (iter2 != null){
-                            while (iter2.hasNext()){
-                                final Statement iter2Statement = iter2.next();
-                                integerValue = 
iter2Statement.getObject().stringValue();
-                                break;
-                            }
-                            iter2.close();
-                        }
-                        iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(st.getObject().stringValue()), RDF.REST,
-                                null, conf);
-                        if (iter2 != null){
-                            while (iter2.hasNext()){
-                                final Statement iter2Statement = iter2.next();
-                                anonPropNode = iter2Statement.getObject();
-                                break;
-                            }
-                            iter2.close();
-                            if (anonPropNode != null){
-                                iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(anonPropNode.stringValue()), RDF.FIRST,
-                                        null, conf);
-                                while (iter2.hasNext()){
-                                    final Statement iter2Statement = 
iter2.next();
-                                    propURI = iter2Statement.getObject();
-                                    break;
-                                }
-                                iter2.close();
-                            }
-                        }
-                        if (!integerValue.isEmpty() && propURI!=null) {
-                            try {
-                                final int indexValue = 
Integer.parseInt(integerValue);
-                                final URI chainPropURI = 
VF.createURI(propURI.stringValue());
-                                orderedProperties.put(indexValue, 
chainPropURI);
-                            }
-                            catch (final Exception ex){
-                                // TODO log an error here
-
-                            }
-                        }
-                    }
-                } finally{
-                    if (iter != null){
-                        iter.close();
-                    }
-                }
-                final List<URI> properties = new ArrayList<>();
-                for (final Map.Entry<Integer, URI> entry : 
orderedProperties.entrySet()){
-                    properties.add(entry.getValue());
-                }
-                propertyChainPropertyToChain.put(propertyChainProperty, 
properties);
+            synchronized(reflexivePropertySet) {
+                reflexivePropertySet.clear();
+                
reflexivePropertySet.addAll(fetchInstances(REFLEXIVE_PROPERTY));
             }
 
-            // could also be represented as a list of properties (some of 
which may be blank nodes)
-            for (final URI propertyChainProperty : 
propertyChainPropertiesToBNodes.keySet()){
-                final List<URI> existingChain = 
propertyChainPropertyToChain.get(propertyChainProperty);
-                // if we didn't get a chain, try to get it through following 
the collection
-                if ((existingChain == null) || existingChain.isEmpty()) {
+            refreshInverseOf();
 
-                    CloseableIteration<Statement, QueryEvaluationException>  
iter2 = RyaDAOHelper.query(ryaDAO, 
propertyChainPropertiesToBNodes.get(propertyChainProperty), RDF.FIRST,
-                            null, conf);
-                    final List<URI> properties = new ArrayList<>();
-                    URI previousBNode = 
propertyChainPropertiesToBNodes.get(propertyChainProperty);
-                    if (iter2.hasNext()) {
-                        Statement iter2Statement = iter2.next();
-                        Value currentPropValue = iter2Statement.getObject();
-                        while ((currentPropValue != null) && 
(!currentPropValue.stringValue().equalsIgnoreCase(RDF.NIL.stringValue()))){
-                            if (currentPropValue instanceof URI){
-                                iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(currentPropValue.stringValue()), RDF.FIRST,
-                                        null, conf);
-                                if (iter2.hasNext()){
-                                    iter2Statement = iter2.next();
-                                    if (iter2Statement.getObject() instanceof 
URI){
-                                        
properties.add((URI)iter2Statement.getObject());
-                                    }
-                                }
-                                // otherwise see if there is an inverse 
declaration
-                                else {
-                                    iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(currentPropValue.stringValue()), OWL.INVERSEOF,
-                                            null, conf);
-                                    if (iter2.hasNext()){
-                                        iter2Statement = iter2.next();
-                                        if (iter2Statement.getObject() 
instanceof URI){
-                                            properties.add(new 
InverseURI((URI)iter2Statement.getObject()));
-                                        }
-                                    }
-                                }
-                                // get the next prop pointer
-                                iter2 = RyaDAOHelper.query(ryaDAO, 
previousBNode, RDF.REST,
-                                        null, conf);
-                                if (iter2.hasNext()){
-                                    iter2Statement = iter2.next();
-                                    previousBNode = (URI)currentPropValue;
-                                    currentPropValue = 
iter2Statement.getObject();
-                                }
-                                else {
-                                    currentPropValue = null;
-                                }
-                            }
-                            else {
-                                currentPropValue = null;
-                            }
-
-                        }
-                        
propertyChainPropertyToChain.put(propertyChainProperty, properties);
-                    }
-                }
-            }
+            refreshPropertyChainPropertyToChain();
 
             refreshDomainRange();
 
             refreshPropertyRestrictions();
-
         } catch (final QueryEvaluationException e) {
             throw new InferenceEngineException(e);
+        } finally {
+            refreshLock.unlock();
         }
     }
 
@@ -434,6 +254,227 @@ public class InferenceEngine {
     }
 
     /**
+     * Add unions to the subclass graph: if c owl:unionOf LIST(c1, c2, ... cn),
+     * then any instances of c1, c2, ... or cn are also instances of c, meaning
+     * c is a superclass of all the rest.
+     * (In principle, an instance of c is likewise implied to be at least one 
of
+     * the other types, but this fact is ignored for now to avoid
+     * nondeterministic reasoning.)
+     * @param graph the {@link Graph} to add to.
+     * @throws QueryEvaluationException
+     */
+    private void addUnions(final Graph graph) throws QueryEvaluationException {
+        final CloseableIteration<Statement, QueryEvaluationException> iter = 
RyaDAOHelper.query(ryaDAO, null, OWL.UNIONOF, null, conf);
+        try {
+            while (iter.hasNext()) {
+                final Statement st = iter.next();
+                final Value unionType = st.getSubject();
+                // Traverse the list of types constituting the union
+                Value current = st.getObject();
+                while (current instanceof Resource && 
!RDF.NIL.equals(current)) {
+                    final Resource listNode = (Resource) current;
+                    CloseableIteration<Statement, QueryEvaluationException> 
listIter = RyaDAOHelper.query(ryaDAO,
+                            listNode, RDF.FIRST, null, conf);
+                    try {
+                        if (listIter.hasNext()) {
+                            final Statement firstStatement = listIter.next();
+                            if (firstStatement.getObject() instanceof 
Resource) {
+                                final Resource subclass = (Resource) 
firstStatement.getObject();
+                                final Statement subclassStatement = 
VF.createStatement(subclass, RDFS.SUBCLASSOF, unionType);
+                                addStatementEdge(graph, 
RDFS.SUBCLASSOF.stringValue(), subclassStatement);
+                            }
+                        }
+                    } finally {
+                        listIter.close();
+                    }
+                    listIter = RyaDAOHelper.query(ryaDAO, listNode, RDF.REST, 
null, conf);
+                    try {
+                        if (listIter.hasNext()) {
+                            current = listIter.next().getObject();
+                        }
+                        else {
+                            current = RDF.NIL;
+                        }
+                    } finally {
+                        listIter.close();
+                    }
+                }
+            }
+        } finally {
+            if (iter != null) {
+                iter.close();
+            }
+        }
+    }
+
+    private void refreshInverseOf() throws QueryEvaluationException {
+        final CloseableIteration<Statement, QueryEvaluationException> iter = 
RyaDAOHelper.query(ryaDAO, null, OWL.INVERSEOF, null, conf);
+        final Map<URI, URI> invProp = new HashMap<>();
+        try {
+            while (iter.hasNext()) {
+                final Statement st = iter.next();
+                invProp.put((URI) st.getSubject(), (URI) st.getObject());
+                invProp.put((URI) st.getObject(), (URI) st.getSubject());
+            }
+        } finally {
+            if (iter != null) {
+                iter.close();
+            }
+        }
+        synchronized(inverseOfMap) {
+            inverseOfMap.clear();
+            inverseOfMap.putAll(invProp);
+        }
+    }
+
+    private void refreshPropertyChainPropertyToChain() throws 
QueryEvaluationException {
+        CloseableIteration<Statement, QueryEvaluationException> iter = 
RyaDAOHelper.query(ryaDAO, null,
+                
VF.createURI("http://www.w3.org/2002/07/owl#propertyChainAxiom"),
+                null, conf);
+        final Map<URI,URI> propertyChainPropertiesToBNodes = new HashMap<>();
+        final Map<URI, List<URI>> tempPropertyChainPropertyToChain = new 
HashMap<>();
+        try {
+            while (iter.hasNext()){
+                final Statement st = iter.next();
+                propertyChainPropertiesToBNodes.put((URI)st.getSubject(), 
(URI)st.getObject());
+            }
+        } finally {
+            if (iter != null) {
+                iter.close();
+            }
+        }
+        // now for each property chain bNode, get the indexed list of 
properties associated with that chain
+        for (final URI propertyChainProperty : 
propertyChainPropertiesToBNodes.keySet()){
+            final URI bNode = 
propertyChainPropertiesToBNodes.get(propertyChainProperty);
+            // query for the list of indexed properties
+            iter = RyaDAOHelper.query(ryaDAO, bNode, 
VF.createURI("http://www.w3.org/2000/10/swap/list#index"),
+                    null, conf);
+            final TreeMap<Integer, URI> orderedProperties = new TreeMap<>();
+            // TODO refactor this.  Wish I could execute sparql
+            try {
+                while (iter.hasNext()){
+                    final Statement st = iter.next();
+                    final String indexedElement = st.getObject().stringValue();
+                    log.info(indexedElement);
+                    CloseableIteration<Statement, QueryEvaluationException>  
iter2 = RyaDAOHelper.query(ryaDAO, VF.createURI(st.getObject().stringValue()), 
RDF.FIRST,
+                            null, conf);
+                    String integerValue = "";
+                    Value anonPropNode = null;
+                    Value propURI = null;
+                    if (iter2 != null){
+                        while (iter2.hasNext()){
+                            final Statement iter2Statement = iter2.next();
+                            integerValue = 
iter2Statement.getObject().stringValue();
+                            break;
+                        }
+                        iter2.close();
+                    }
+                    iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(st.getObject().stringValue()), RDF.REST,
+                            null, conf);
+                    if (iter2 != null){
+                        while (iter2.hasNext()){
+                            final Statement iter2Statement = iter2.next();
+                            anonPropNode = iter2Statement.getObject();
+                            break;
+                        }
+                        iter2.close();
+                        if (anonPropNode != null){
+                            iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(anonPropNode.stringValue()), RDF.FIRST,
+                                    null, conf);
+                            while (iter2.hasNext()){
+                                final Statement iter2Statement = iter2.next();
+                                propURI = iter2Statement.getObject();
+                                break;
+                            }
+                            iter2.close();
+                        }
+                    }
+                    if (!integerValue.isEmpty() && propURI!=null) {
+                        try {
+                            final int indexValue = 
Integer.parseInt(integerValue);
+                            final URI chainPropURI = 
VF.createURI(propURI.stringValue());
+                            orderedProperties.put(indexValue, chainPropURI);
+                        }
+                        catch (final Exception e){
+                            log.error("Error adding chain property to ordered 
properties", e);
+                        }
+                    }
+                }
+            } finally{
+                if (iter != null){
+                    iter.close();
+                }
+            }
+            final List<URI> properties = new ArrayList<>();
+            for (final Map.Entry<Integer, URI> entry : 
orderedProperties.entrySet()){
+                properties.add(entry.getValue());
+            }
+            tempPropertyChainPropertyToChain.put(propertyChainProperty, 
properties);
+        }
+
+        // could also be represented as a list of properties (some of which 
may be blank nodes)
+        for (final URI propertyChainProperty : 
propertyChainPropertiesToBNodes.keySet()){
+            final List<URI> existingChain = 
tempPropertyChainPropertyToChain.get(propertyChainProperty);
+            // if we didn't get a chain, try to get it through following the 
collection
+            if ((existingChain == null) || existingChain.isEmpty()) {
+
+                CloseableIteration<Statement, QueryEvaluationException>  iter2 
= RyaDAOHelper.query(ryaDAO, 
propertyChainPropertiesToBNodes.get(propertyChainProperty), RDF.FIRST,
+                        null, conf);
+                final List<URI> properties = new ArrayList<>();
+                URI previousBNode = 
propertyChainPropertiesToBNodes.get(propertyChainProperty);
+                if (iter2.hasNext()) {
+                    Statement iter2Statement = iter2.next();
+                    Value currentPropValue = iter2Statement.getObject();
+                    while ((currentPropValue != null) && 
(!currentPropValue.stringValue().equalsIgnoreCase(RDF.NIL.stringValue()))){
+                        if (currentPropValue instanceof URI){
+                            iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(currentPropValue.stringValue()), RDF.FIRST,
+                                    null, conf);
+                            if (iter2.hasNext()){
+                                iter2Statement = iter2.next();
+                                if (iter2Statement.getObject() instanceof URI){
+                                    
properties.add((URI)iter2Statement.getObject());
+                                }
+                            }
+                            // otherwise see if there is an inverse declaration
+                            else {
+                                iter2 = RyaDAOHelper.query(ryaDAO, 
VF.createURI(currentPropValue.stringValue()), OWL.INVERSEOF,
+                                        null, conf);
+                                if (iter2.hasNext()){
+                                    iter2Statement = iter2.next();
+                                    if (iter2Statement.getObject() instanceof 
URI){
+                                        properties.add(new 
InverseURI((URI)iter2Statement.getObject()));
+                                    }
+                                }
+                            }
+                            // get the next prop pointer
+                            iter2 = RyaDAOHelper.query(ryaDAO, previousBNode, 
RDF.REST,
+                                    null, conf);
+                            if (iter2.hasNext()){
+                                iter2Statement = iter2.next();
+                                previousBNode = (URI)currentPropValue;
+                                currentPropValue = iter2Statement.getObject();
+                            }
+                            else {
+                                currentPropValue = null;
+                            }
+                        }
+                        else {
+                            currentPropValue = null;
+                        }
+
+                    }
+                    
tempPropertyChainPropertyToChain.put(propertyChainProperty, properties);
+                }
+            }
+        }
+
+        synchronized(propertyChainPropertyToChain) {
+            propertyChainPropertyToChain.clear();
+            
propertyChainPropertyToChain.putAll(tempPropertyChainPropertyToChain);
+        }
+    }
+
+    /**
      * Queries domain and range information, then populates the inference 
engine with direct
      * domain/range relations and any that can be inferred from the subclass 
graph, subproperty
      * graph, and inverse property map. Should be called after that class and 
property information
@@ -587,8 +628,14 @@ public class InferenceEngine {
                 }
             }
         }
-        domainByType = domainByTypePartial;
-        rangeByType = rangeByTypePartial;
+        synchronized(domainByType) {
+            domainByType.clear();
+            domainByType.putAll(domainByTypePartial);
+        }
+        synchronized(rangeByType) {
+            rangeByType.clear();
+            rangeByType.putAll(rangeByTypePartial);
+        }
     }
 
     private void refreshPropertyRestrictions() throws QueryEvaluationException 
{
@@ -613,8 +660,8 @@ public class InferenceEngine {
     }
 
     private void refreshHasValueRestrictions(final Map<Resource, URI> 
restrictions) throws QueryEvaluationException {
-        hasValueByType = new HashMap<>();
-        hasValueByProperty = new HashMap<>();
+        hasValueByType.clear();
+        hasValueByProperty.clear();
         final CloseableIteration<Statement, QueryEvaluationException> iter = 
RyaDAOHelper.query(ryaDAO, null, OWL.HASVALUE, null, conf);
         try {
             while (iter.hasNext()) {
@@ -641,7 +688,7 @@ public class InferenceEngine {
     }
 
     private void refreshSomeValuesFromRestrictions(final Map<Resource, URI> 
restrictions) throws QueryEvaluationException {
-        someValuesFromByRestrictionType = new ConcurrentHashMap<>();
+        someValuesFromByRestrictionType.clear();
         ryaDaoQueryWrapper.queryAll(null, OWL.SOMEVALUESFROM, null, new 
RDFHandlerBase() {
             @Override
             public void handleStatement(final Statement statement) throws 
RDFHandlerException {
@@ -667,7 +714,7 @@ public class InferenceEngine {
     }
 
     private void refreshAllValuesFromRestrictions(final Map<Resource, URI> 
restrictions) throws QueryEvaluationException {
-        allValuesFromByValueType = new ConcurrentHashMap<>();
+        allValuesFromByValueType.clear();
         ryaDaoQueryWrapper.queryAll(null, OWL.ALLVALUESFROM, null, new 
RDFHandlerBase() {
             @Override
             public void handleStatement(final Statement statement) throws 
RDFHandlerException {
@@ -693,8 +740,8 @@ public class InferenceEngine {
     }
 
     private void refreshHasSelfRestrictions(final Map<Resource, URI> 
restrictions) throws QueryEvaluationException {
-        hasSelfByType = new HashMap<>();
-        hasSelfByProperty = new HashMap<>();
+        hasSelfByType.clear();
+        hasSelfByProperty.clear();
 
         for(final Resource type : restrictions.keySet()) {
             final URI property = restrictions.get(type);
@@ -858,8 +905,10 @@ public class InferenceEngine {
             }
         });
 
-        enumerations.clear();
-        enumerations.putAll(enumTypes);
+        synchronized(enumerations) {
+            enumerations.clear();
+            enumerations.putAll(enumTypes);
+        }
     }
 
     /**
@@ -869,7 +918,7 @@ public class InferenceEngine {
      *
      * This takes into account type hierarchy, where children of a type that
      * have this property are also assumed to have the property.
-     * 
+     *
      * @param type
      *            The type (URI or bnode) to check against the known
      *            restrictions
@@ -887,7 +936,7 @@ public class InferenceEngine {
         }
         //findParent gets all subclasses, add self.
         if (type instanceof URI) {
-            for (final URI subtype : findParents(subClassOfGraph, (URI) type)) 
{
+            for (final URI subtype : findParents(subClassOfGraph.get(), (URI) 
type)) {
                 tempProperties = hasSelfByType.get(subtype);
                 if (tempProperties != null) {
                     properties.addAll(tempProperties);
@@ -915,11 +964,10 @@ public class InferenceEngine {
 
         if (baseTypes != null) {
             types.addAll(baseTypes);
-
             // findParent gets all subclasses, add self.
             for (final Resource baseType : baseTypes) {
                 if (baseType instanceof URI) {
-                    types.addAll(findParents(subClassOfGraph, (URI) baseType));
+                    types.addAll(findParents(subClassOfGraph.get(), (URI) 
baseType));
                 }
             }
         }
@@ -983,7 +1031,8 @@ public class InferenceEngine {
     private void addSubClassOf(final Resource s, final Resource o) {
         final Statement statement = new StatementImpl(s, RDFS.SUBCLASSOF, o);
         final String edgeName = RDFS.SUBCLASSOF.stringValue();
-        addStatementEdge(subClassOfGraph, edgeName, statement);
+
+        addStatementEdge(subClassOfGraph.get(), edgeName, statement);
     }
 
     private void addIntersection(final Set<Resource> intersection, final 
Resource type) {
@@ -1032,7 +1081,7 @@ public class InferenceEngine {
      * or if either type or the subclass graph is {@code null}.
      */
     public Set<URI> getSuperClasses(final URI type) {
-        return findChildren(subClassOfGraph, type);
+        return findChildren(subClassOfGraph.get(), type);
     }
 
     /**
@@ -1044,7 +1093,7 @@ public class InferenceEngine {
      * or if either type or the subclass graph is {@code null}.
      */
     public Set<URI> getSubClasses(final URI type) {
-        return findParents(subClassOfGraph, type);
+        return findParents(subClassOfGraph.get(), type);
     }
 
     /**
@@ -1056,7 +1105,7 @@ public class InferenceEngine {
      * or if either property or the subproperty graph is {@code null}.
      */
     public Set<URI> getSuperProperties(final URI property) {
-        return findChildren(subPropertyOfGraph, property);
+        return findChildren(subPropertyOfGraph.get(), property);
     }
 
     /**
@@ -1068,7 +1117,7 @@ public class InferenceEngine {
      * or if either property or the subproperty graph is {@code null}.
      */
     public Set<URI> getSubProperties(final URI property) {
-        return findParents(subPropertyOfGraph, property);
+        return findParents(subPropertyOfGraph.get(), property);
     }
 
     /**
@@ -1283,32 +1332,32 @@ public class InferenceEngine {
     }
 
     public boolean isInitialized() {
-        return initialized;
+        return isInitialized.get();
     }
 
-    public void setInitialized(final boolean initialized) {
-        this.initialized = initialized;
+    public void setInitialized(final boolean isInitialized) {
+        this.isInitialized.set(isInitialized);
     }
 
-    public RyaDAO<?> getRyaDAO() {
+    public synchronized RyaDAO<?> getRyaDAO() {
         return ryaDAO;
     }
 
-    public void setRyaDAO(final RyaDAO<?> ryaDAO) {
+    public synchronized void setRyaDAO(final RyaDAO<?> ryaDAO) {
         this.ryaDAO = ryaDAO;
         ryaDaoQueryWrapper = new RyaDaoQueryWrapper(ryaDAO);
     }
 
-    public RdfCloudTripleStoreConfiguration getConf() {
+    public synchronized RdfCloudTripleStoreConfiguration getConf() {
         return conf;
     }
 
-    public void setConf(final RdfCloudTripleStoreConfiguration conf) {
+    public synchronized void setConf(final RdfCloudTripleStoreConfiguration 
conf) {
         this.conf = conf;
     }
 
     public Graph getSubClassOfGraph() {
-        return subClassOfGraph;
+        return subClassOfGraph.get();
     }
 
     public Map<URI, List<URI>> getPropertyChainMap() {
@@ -1323,47 +1372,35 @@ public class InferenceEngine {
     }
 
     public Graph getSubPropertyOfGraph() {
-        return subPropertyOfGraph;
+        return subPropertyOfGraph.get();
     }
 
     public long getRefreshGraphSchedule() {
-        return refreshGraphSchedule;
+        return refreshGraphSchedule.get();
     }
 
     public void setRefreshGraphSchedule(final long refreshGraphSchedule) {
-        this.refreshGraphSchedule = refreshGraphSchedule;
+        this.refreshGraphSchedule.set(refreshGraphSchedule);
     }
 
     public Set<URI> getSymmetricPropertySet() {
         return symmetricPropertySet;
     }
 
-    public void setSymmetricPropertySet(final Set<URI> symmetricPropertySet) {
-        this.symmetricPropertySet = symmetricPropertySet;
-    }
-
     public Map<URI, URI> getInverseOfMap() {
         return inverseOfMap;
     }
 
-    public void setInverseOfMap(final Map<URI, URI> inverseOfMap) {
-        this.inverseOfMap = inverseOfMap;
-    }
-
     public Set<URI> getTransitivePropertySet() {
         return transitivePropertySet;
     }
 
-    public void setTransitivePropertySet(final Set<URI> transitivePropertySet) 
{
-        this.transitivePropertySet = transitivePropertySet;
-    }
-
     public boolean isSchedule() {
-        return schedule;
+        return schedule.get();
     }
 
     public void setSchedule(final boolean schedule) {
-        this.schedule = schedule;
+        this.schedule.set(schedule);
     }
 
     /**
@@ -1532,7 +1569,7 @@ public class InferenceEngine {
      *      to the restriction type. Empty map if the parameter is {@code 
null} or if the
      *      someValuesFrom schema has not been populated.
      */
-    public Map<Resource, Set<URI>> getSomeValuesFromByRestrictionType(Resource 
restrictionType) {
+    public Map<Resource, Set<URI>> getSomeValuesFromByRestrictionType(final 
Resource restrictionType) {
         return getTypePropertyImplyingType(restrictionType, 
someValuesFromByRestrictionType);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java
----------------------------------------------------------------------
diff --git 
a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java
 
b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java
index 251aa0c..ae7e059 100644
--- 
a/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java
+++ 
b/sail/src/main/java/org/apache/rya/rdftriplestore/inference/PropertyChainVisitor.java
@@ -83,6 +83,9 @@ public class PropertyChainVisitor extends 
AbstractInferVisitor {
                     nextSubj = currentObj;
 
                 }
+                if (lastStatementPatternAdded == null) {
+                    throw new NullPointerException("lastStatementPatternAdded 
is null despite non-empty inferenceEngine property chain. chain.size()==" + 
chain.size());
+                }
                 lastStatementPatternAdded.setObjectVar(originalObj);
 
                 TupleExpr lastRight = null;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/sail/src/main/java/org/apache/rya/rdftriplestore/namespace/NamespaceManager.java
----------------------------------------------------------------------
diff --git 
a/sail/src/main/java/org/apache/rya/rdftriplestore/namespace/NamespaceManager.java
 
b/sail/src/main/java/org/apache/rya/rdftriplestore/namespace/NamespaceManager.java
index 3664d64..3d8526f 100644
--- 
a/sail/src/main/java/org/apache/rya/rdftriplestore/namespace/NamespaceManager.java
+++ 
b/sail/src/main/java/org/apache/rya/rdftriplestore/namespace/NamespaceManager.java
@@ -1,5 +1,14 @@
 package org.apache.rya.rdftriplestore.namespace;
 
+import java.io.InputStream;
+
+import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.api.persist.RdfDAOException;
+import org.apache.rya.api.persist.RyaDAO;
+import org.apache.rya.api.persist.RyaNamespaceManager;
+import org.openrdf.model.Namespace;
+import org.openrdf.sail.SailException;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,18 +31,10 @@ package org.apache.rya.rdftriplestore.namespace;
 
 
 import info.aduna.iteration.CloseableIteration;
-import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
-import org.apache.rya.api.persist.RdfDAOException;
-import org.apache.rya.api.persist.RyaDAO;
-import org.apache.rya.api.persist.RyaNamespaceManager;
 import net.sf.ehcache.Cache;
 import net.sf.ehcache.CacheManager;
 import net.sf.ehcache.Element;
 import net.sf.ehcache.Statistics;
-import org.openrdf.model.Namespace;
-import org.openrdf.sail.SailException;
-
-import java.io.InputStream;
 
 /**
  * Class NamespaceManager
@@ -53,10 +54,9 @@ public class NamespaceManager {
     }
 
     protected void initialize(RyaDAO ryaDAO) {
-        try {
-            this.namespaceManager = ryaDAO.getNamespaceManager();
+        this.namespaceManager = ryaDAO.getNamespaceManager();
 
-            InputStream cacheConfigStream = 
Thread.currentThread().getContextClassLoader().getResourceAsStream("ehcache.xml");
+        try (InputStream cacheConfigStream = 
Thread.currentThread().getContextClassLoader().getResourceAsStream("ehcache.xml"))
 {
             if (cacheConfigStream == null) {
                 this.cacheManager = CacheManager.create();
 //                throw new RuntimeException("Cache Configuration does not 
exist");

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/web/web.rya/resources/environment.properties
----------------------------------------------------------------------
diff --git a/web/web.rya/resources/environment.properties 
b/web/web.rya/resources/environment.properties
deleted file mode 100644
index 2a9ffe7..0000000
--- a/web/web.rya/resources/environment.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-instance.name=cloudbase
-instance.zk=localhost:2181
-instance.username=root
-instance.password=secret
-rya.tableprefix=triplestore_
-rya.displayqueryplan=true
-mongo.db.collectionprefix=rya_
-mongo.db.instance=localhost
-mongo.db.name=rya
-mongo.db.port=27017

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
----------------------------------------------------------------------
diff --git 
a/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java 
b/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
index 93c5972..209c2b4 100644
--- a/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
+++ b/web/web.rya/src/main/java/org/apache/cloud/rdf/web/sail/RdfController.java
@@ -8,9 +8,9 @@ package org.apache.cloud.rdf.web.sail;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -34,10 +34,12 @@ import javax.servlet.ServletOutputStream;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.rya.api.security.SecurityProvider;
+import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.log4j.Logger;
 import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.api.log.LogUtils;
+import org.apache.rya.api.security.SecurityProvider;
 import org.apache.rya.rdftriplestore.RdfCloudTripleStoreConnection;
-
 import org.openrdf.model.Resource;
 import org.openrdf.model.Statement;
 import org.openrdf.query.BindingSet;
@@ -82,58 +84,57 @@ import org.springframework.web.bind.annotation.RequestParam;
  */
 @Controller
 public class RdfController {
-    
-       private static final int QUERY_TIME_OUT_SECONDS = 120;
+    private static final Logger log = Logger.getLogger(RdfController.class);
+
+    private static final int QUERY_TIME_OUT_SECONDS = 120;
 
     @Autowired
     SailRepository repository;
-    
-    @Autowired   
+
+    @Autowired
     SecurityProvider provider;
 
     @RequestMapping(value = "/queryrdf", method = {RequestMethod.GET, 
RequestMethod.POST})
-    public void queryRdf(@RequestParam("query") String query,
+    public void queryRdf(@RequestParam("query") final String query,
                          @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, required = false) String auth,
-                         @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_CV, required = false) String vis,
-                         @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_INFER, required = false) String infer,
-                         @RequestParam(value = "nullout", required = false) 
String nullout,
-                         @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_RESULT_FORMAT, required = false) String 
emit,
-                         @RequestParam(value = "padding", required = false) 
String padding,
-                         @RequestParam(value = "callback", required = false) 
String callback,
-                         HttpServletRequest request,
-                         HttpServletResponse response) {
+                         @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_CV, required = false) final String vis,
+                         @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_INFER, required = false) final String 
infer,
+                         @RequestParam(value = "nullout", required = false) 
final String nullout,
+                         @RequestParam(value = 
RdfCloudTripleStoreConfiguration.CONF_RESULT_FORMAT, required = false) final 
String emit,
+                         @RequestParam(value = "padding", required = false) 
final String padding,
+                         @RequestParam(value = "callback", required = false) 
final String callback,
+                         final HttpServletRequest request,
+                         final HttpServletResponse response) {
+        // WARNING: if you add to the above request variables,
+        // Be sure to validate and encode since they come from the outside and 
could contain odd damaging character sequences.
         SailRepositoryConnection conn = null;
-               final Thread queryThread = Thread.currentThread();
-               auth = 
StringUtils.arrayToCommaDelimitedString(provider.getUserAuths(request));
-               Timer timer = new Timer();
-               timer.schedule(new TimerTask() {
-
-                       @Override
-                       public void run() {
-                               System.out.println("interrupting");
-                               queryThread.interrupt();
-
-                       }
-               }, QUERY_TIME_OUT_SECONDS * 1000);
-               
-               try {
-                       ServletOutputStream os = response.getOutputStream();
-            conn = repository.getConnection();
+        final Thread queryThread = Thread.currentThread();
+        auth = 
StringUtils.arrayToCommaDelimitedString(provider.getUserAuths(request));
+        final Timer timer = new Timer();
+        timer.schedule(new TimerTask() {
 
-            Boolean isBlankQuery = StringUtils.isEmpty(query);
-            ParsedOperation operation = 
QueryParserUtil.parseOperation(QueryLanguage.SPARQL, query, null);
+            @Override
+            public void run() {
+                log.debug("interrupting");
+                queryThread.interrupt();
 
-            Boolean requestedCallback = !StringUtils.isEmpty(callback);
-            Boolean requestedFormat = !StringUtils.isEmpty(emit);
-
-            if (requestedCallback) {
-                os.print(callback + "(");
             }
+        }, QUERY_TIME_OUT_SECONDS * 1000);
+
+        try {
+            final ServletOutputStream os = response.getOutputStream();
+            conn = repository.getConnection();
+
+            final Boolean isBlankQuery = StringUtils.isEmpty(query);
+            final ParsedOperation operation = 
QueryParserUtil.parseOperation(QueryLanguage.SPARQL, query, null);
+
+            final Boolean requestedCallback = !StringUtils.isEmpty(callback);
+            final Boolean requestedFormat = !StringUtils.isEmpty(emit);
 
             if (!isBlankQuery) {
-               if (operation instanceof ParsedGraphQuery) {
-                       // Perform Graph Query
-                    RDFHandler handler = new RDFXMLWriter(os);
+                if (operation instanceof ParsedGraphQuery) {
+                    // Perform Graph Query
+                    final RDFHandler handler = new RDFXMLWriter(os);
                     response.setContentType("text/xml");
                     performGraphQuery(query, conn, auth, infer, nullout, 
handler);
                 } else if (operation instanceof ParsedTupleQuery) {
@@ -160,33 +161,35 @@ public class RdfController {
             if (requestedCallback) {
                 os.print(")");
             }
-        } catch (Exception e) {
-            e.printStackTrace();
+        } catch (final Exception e) {
+            log.error("Error running query", e);
             throw new RuntimeException(e);
         } finally {
             if (conn != null) {
                 try {
                     conn.close();
-                } catch (RepositoryException e) {
-                    e.printStackTrace();
+                } catch (final RepositoryException e) {
+                    log.error("Error closing connection", e);
                 }
             }
         }
-               
-               timer.cancel();
+
+        timer.cancel();
     }
-    
-    private void performQuery(String query, RepositoryConnection conn, String 
auth, String infer, String nullout, TupleQueryResultHandler handler) throws 
RepositoryException, MalformedQueryException, QueryEvaluationException, 
TupleQueryResultHandlerException {
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, 
query);
-        if (auth != null && auth.length() > 0)
+
+    private void performQuery(final String query, final RepositoryConnection 
conn, final String auth, final String infer, final String nullout, final 
TupleQueryResultHandler handler) throws RepositoryException, 
MalformedQueryException, QueryEvaluationException, 
TupleQueryResultHandlerException {
+        final TupleQuery tupleQuery = 
conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        if (auth != null && auth.length() > 0) {
             
tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, 
VALUE_FACTORY.createLiteral(auth));
-        if (infer != null && infer.length() > 0)
+        }
+        if (infer != null && infer.length() > 0) {
             tupleQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, 
VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
+        }
         if (nullout != null && nullout.length() > 0) {
             //output nothing, but still run query
             tupleQuery.evaluate(new TupleQueryResultHandler() {
                 @Override
-                public void startQueryResult(List<String> strings) throws 
TupleQueryResultHandlerException {
+                public void startQueryResult(final List<String> strings) 
throws TupleQueryResultHandlerException {
                 }
 
                 @Override
@@ -194,147 +197,155 @@ public class RdfController {
                 }
 
                 @Override
-                public void handleSolution(BindingSet bindings) throws 
TupleQueryResultHandlerException {
+                public void handleSolution(final BindingSet bindings) throws 
TupleQueryResultHandlerException {
                 }
 
                 @Override
-                public void handleBoolean(boolean arg0) throws 
QueryResultHandlerException {
+                public void handleBoolean(final boolean arg0) throws 
QueryResultHandlerException {
                 }
 
                 @Override
-                public void handleLinks(List<String> arg0) throws 
QueryResultHandlerException {
+                public void handleLinks(final List<String> arg0) throws 
QueryResultHandlerException {
                 }
             });
         } else {
-            CountingTupleQueryResultHandlerWrapper sparqlWriter = new 
CountingTupleQueryResultHandlerWrapper(handler);
-            long startTime = System.currentTimeMillis();
+            final CountingTupleQueryResultHandlerWrapper sparqlWriter = new 
CountingTupleQueryResultHandlerWrapper(handler);
+            final long startTime = System.currentTimeMillis();
             tupleQuery.evaluate(sparqlWriter);
-            System.out.format("Query Time = %.3f\n", 
(System.currentTimeMillis() - startTime) / 1000.);
-            System.out.format("Result Count = %s\n", sparqlWriter.getCount());
+            log.info(String.format("Query Time = %.3f\n", 
(System.currentTimeMillis() - startTime) / 1000.));
+            log.info(String.format("Result Count = %s\n", 
sparqlWriter.getCount()));
         }
 
     }
-    
-    private void performGraphQuery(String query, RepositoryConnection conn, 
String auth, String infer, String nullout, RDFHandler handler) throws 
RepositoryException, MalformedQueryException, QueryEvaluationException, 
RDFHandlerException {
-        GraphQuery graphQuery = conn.prepareGraphQuery(QueryLanguage.SPARQL, 
query);
-        if (auth != null && auth.length() > 0)
-               
graphQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, 
VALUE_FACTORY.createLiteral(auth));
-        if (infer != null && infer.length() > 0)
-               
graphQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, 
VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
+
+    private void performGraphQuery(final String query, final 
RepositoryConnection conn, final String auth, final String infer, final String 
nullout, final RDFHandler handler) throws RepositoryException, 
MalformedQueryException, QueryEvaluationException, RDFHandlerException {
+        final GraphQuery graphQuery = 
conn.prepareGraphQuery(QueryLanguage.SPARQL, query);
+        if (auth != null && auth.length() > 0) {
+            
graphQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH, 
VALUE_FACTORY.createLiteral(auth));
+        }
+        if (infer != null && infer.length() > 0) {
+            graphQuery.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, 
VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
+        }
         if (nullout != null && nullout.length() > 0) {
             //output nothing, but still run query
-               // TODO this seems like a strange use case.
-               graphQuery.evaluate(new RDFHandler() {
-                               @Override
-                               public void startRDF() throws 
RDFHandlerException {
-                               }
-
-                               @Override
-                               public void endRDF() throws RDFHandlerException 
{
-                               }
-
-                               @Override
-                               public void handleNamespace(String prefix, 
String uri)
-                                               throws RDFHandlerException {
-                               }
-
-                               @Override
-                               public void handleStatement(Statement st)
-                                               throws RDFHandlerException {
-                               }
-
-                               @Override
-                               public void handleComment(String comment)
-                                               throws RDFHandlerException {
-                               }
+            // TODO this seems like a strange use case.
+            graphQuery.evaluate(new RDFHandler() {
+                @Override
+                public void startRDF() throws RDFHandlerException {
+                }
+
+                @Override
+                public void endRDF() throws RDFHandlerException {
+                }
+
+                @Override
+                public void handleNamespace(final String prefix, final String 
uri)
+                        throws RDFHandlerException {
+                }
+
+                @Override
+                public void handleStatement(final Statement st)
+                        throws RDFHandlerException {
+                }
+
+                @Override
+                public void handleComment(final String comment)
+                        throws RDFHandlerException {
+                }
             });
         } else {
-            long startTime = System.currentTimeMillis();
+            final long startTime = System.currentTimeMillis();
             graphQuery.evaluate(handler);
-            System.out.format("Query Time = %.3f\n", 
(System.currentTimeMillis() - startTime) / 1000.);
+            log.info(String.format("Query Time = %.3f\n", 
(System.currentTimeMillis() - startTime) / 1000.));
         }
 
     }
-    private void performUpdate(String query, SailRepositoryConnection conn, 
ServletOutputStream os, String infer, String vis) throws RepositoryException, 
MalformedQueryException, IOException {
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        if (infer != null && infer.length() > 0)
+    private void performUpdate(final String query, final 
SailRepositoryConnection conn, final ServletOutputStream os, final String 
infer, final String vis) throws RepositoryException, MalformedQueryException, 
IOException {
+        final Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        if (infer != null && infer.length() > 0) {
             update.setBinding(RdfCloudTripleStoreConfiguration.CONF_INFER, 
VALUE_FACTORY.createLiteral(Boolean.parseBoolean(infer)));
+        }
 
         if (conn.getSailConnection() instanceof RdfCloudTripleStoreConnection 
&& vis != null) {
-            RdfCloudTripleStoreConnection sailConnection = 
(RdfCloudTripleStoreConnection) conn.getSailConnection();
+            final RdfCloudTripleStoreConnection<?> sailConnection = 
(RdfCloudTripleStoreConnection<?>) conn.getSailConnection();
             
sailConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, vis);
         }
 
-        long startTime = System.currentTimeMillis();
+        final long startTime = System.currentTimeMillis();
 
         try {
             update.execute();
-        } catch (UpdateExecutionException e) {
-            os.print(String.format("Update could not be successfully completed 
for query: %s\n\n", query));
-            os.print(String.format("\n\n%s", e.getLocalizedMessage()));
+        } catch (final UpdateExecutionException e) {
+            final String message = "Update could not be successfully completed 
for query: ";
+            os.print(String.format(message + "%s\n\n", 
StringEscapeUtils.escapeHtml4(query)));
+            log.error(message + LogUtils.clean(query), e);
         }
 
-        System.out.format("Update Time = %.3f\n", (System.currentTimeMillis() 
- startTime) / 1000.);
-    }    
-    
+        log.info(String.format("Update Time = %.3f\n", 
(System.currentTimeMillis() - startTime) / 1000.));
+    }
+
     private static final class CountingTupleQueryResultHandlerWrapper 
implements TupleQueryResultHandler {
-       private TupleQueryResultHandler indir;
-       private int count = 0;
-       
-       public CountingTupleQueryResultHandlerWrapper(TupleQueryResultHandler 
indir){
-               this.indir = indir;
-       }
-       
-       public int getCount() { return count; }
-       
-       @Override
-       public void endQueryResult() throws TupleQueryResultHandlerException {
-               indir.endQueryResult();
-       }
-
-       @Override
-       public void handleSolution(BindingSet bindingSet) throws 
TupleQueryResultHandlerException {
-               count++;
-               indir.handleSolution(bindingSet);
-       }
-       @Override
-       public void startQueryResult(List<String> bindingNames) throws 
TupleQueryResultHandlerException {
-               count = 0;
-               indir.startQueryResult(bindingNames);
-       }
-
-      @Override
-      public void handleBoolean(boolean arg0) throws 
QueryResultHandlerException {
-      }
-
-      @Override
-      public void handleLinks(List<String> arg0) throws 
QueryResultHandlerException {
-      }
+        private final TupleQueryResultHandler indir;
+        private int count = 0;
+
+        public CountingTupleQueryResultHandlerWrapper(final 
TupleQueryResultHandler indir){
+            this.indir = indir;
+        }
+
+        public int getCount() {
+            return count;
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+            indir.endQueryResult();
+        }
+
+        @Override
+        public void handleSolution(final BindingSet bindingSet) throws 
TupleQueryResultHandlerException {
+            count++;
+            indir.handleSolution(bindingSet);
+        }
+
+        @Override
+        public void startQueryResult(final List<String> bindingNames) throws 
TupleQueryResultHandlerException {
+            count = 0;
+            indir.startQueryResult(bindingNames);
+        }
+
+        @Override
+        public void handleBoolean(final boolean arg0) throws 
QueryResultHandlerException {
+        }
+
+        @Override
+        public void handleLinks(final List<String> arg0) throws 
QueryResultHandlerException {
+        }
     }
 
     @RequestMapping(value = "/loadrdf", method = RequestMethod.POST)
-    public void loadRdf(@RequestParam(required = false) String format,
-            @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_CV, 
required = false) String cv,
-            @RequestParam(required = false) String graph,
-                        @RequestBody String body,
-                        HttpServletResponse response)
+    public void loadRdf(@RequestParam(required = false) final String format,
+            @RequestParam(value = RdfCloudTripleStoreConfiguration.CONF_CV, 
required = false) final String cv,
+            @RequestParam(required = false) final String graph,
+                        @RequestBody final String body,
+                        final HttpServletResponse response)
             throws RepositoryException, IOException, RDFParseException {
-        List<Resource> authList = new ArrayList<Resource>();
+        final List<Resource> authList = new ArrayList<Resource>();
         RDFFormat format_r = RDFFormat.RDFXML;
         if (format != null) {
             format_r = RDFFormat.valueOf(format);
-            if (format_r == null)
+            if (format_r == null) {
                 throw new RuntimeException("RDFFormat[" + format + "] not 
found");
+            }
         }
         if (graph != null) {
-               authList.add(VALUE_FACTORY.createURI(graph));
+            authList.add(VALUE_FACTORY.createURI(graph));
         }
         SailRepositoryConnection conn = null;
         try {
             conn = repository.getConnection();
-            
+
             if (conn.getSailConnection() instanceof 
RdfCloudTripleStoreConnection && cv != null) {
-                RdfCloudTripleStoreConnection sailConnection = 
(RdfCloudTripleStoreConnection) conn.getSailConnection();
+                final RdfCloudTripleStoreConnection<?> sailConnection = 
(RdfCloudTripleStoreConnection<?>) conn.getSailConnection();
                 
sailConnection.getConf().set(RdfCloudTripleStoreConfiguration.CONF_CV, cv);
             }
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/web/web.rya/src/main/webapp/sparqlQuery.jsp
----------------------------------------------------------------------
diff --git a/web/web.rya/src/main/webapp/sparqlQuery.jsp 
b/web/web.rya/src/main/webapp/sparqlQuery.jsp
index a787b81..947a2db 100644
--- a/web/web.rya/src/main/webapp/sparqlQuery.jsp
+++ b/web/web.rya/src/main/webapp/sparqlQuery.jsp
@@ -20,16 +20,22 @@ under the License.
 <%@ page contentType="text/html; charset=iso-8859-1" language="java" %>
 <%@ page import="java.net.*" %>
 <%
-    String sparql=request.getParameter("sparql");
-    String infer=request.getParameter("infer");
-    String auth=request.getParameter("auth");
-    String vis=request.getParameter("vis");
-       String resultFormat = request.getParameter("emit");
-    String padding = request.getParameter("padding");
+    String x;
+    String sparql=(x=request.getParameter("sparql"))==null?"":x;
+
+    if(sparql.length() > 0 ) {
+        String infer = (x=request.getParameter("infer"))==null?"":x;
+        String auth = (x=request.getParameter("auth"))==null?"":x;
+        String vis = (x=request.getParameter("vis"))==null?"":x;
+        String resultFormat = (x=request.getParameter("emit"))==null?"":x;
+
+        String urlTo = //
+        "queryrdf?query.infer="+URLEncoder.encode(infer,"UTF-8") //
+        +"&query.auth="+URLEncoder.encode(auth,"UTF-8") //
+        +"&conf.cv="+URLEncoder.encode(vis,"UTF-8") //
+        +"&query.resultformat="+URLEncoder.encode(resultFormat,"UTF-8") //
+        +"&query="+URLEncoder.encode(sparql,"UTF-8");
 
-    if(sparql != null){
-        String sparqlEnc = URLEncoder.encode(sparql,"UTF-8");
-        String urlTo = 
"queryrdf?query.infer="+infer+"&query.auth="+auth+"&conf.cv="+vis+"&query.resultformat="+resultFormat+"&padding="+padding+"&query="+sparqlEnc;
         response.sendRedirect(urlTo);
     }
 %>
@@ -43,9 +49,7 @@ under the License.
     </tr>
   <tr>
     <td>SPARQL Query: </td>
-    <td><textarea cols="150" rows="50" name="sparql">
-Enter Sparql query here
-    </textarea></td>
+    <td><textarea cols="150" rows="50" name="sparql" placeholder="Enter SPARQL 
query here"></textarea></td>
   </tr>
   <tr>
       <td>Inferencing?(true/false)</td>
@@ -66,10 +70,6 @@ Enter Sparql query here
                                <option value="json">JSON</option>
                        </select></td>
                </tr>
-               <tr>
-                       <td>JSONP Padding</td>
-                       <td><input type=text name="padding" size="20"></td>
-               </tr>
   <tr>
     <td>&nbsp;</td>
     <td><input type="submit" name="submit" value="Submit"></td>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/7b571d43/web/web.rya/src/test/java/org/apache/cloud/rdf/web/sail/RdfControllerTest.java
----------------------------------------------------------------------
diff --git 
a/web/web.rya/src/test/java/org/apache/cloud/rdf/web/sail/RdfControllerTest.java
 
b/web/web.rya/src/test/java/org/apache/cloud/rdf/web/sail/RdfControllerTest.java
index a09c9b1..f69de61 100644
--- 
a/web/web.rya/src/test/java/org/apache/cloud/rdf/web/sail/RdfControllerTest.java
+++ 
b/web/web.rya/src/test/java/org/apache/cloud/rdf/web/sail/RdfControllerTest.java
@@ -8,9 +8,9 @@ package org.apache.cloud.rdf.web.sail;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -19,15 +19,15 @@ package org.apache.cloud.rdf.web.sail;
  * under the License.
  */
 
-
-
-import static org.hamcrest.Matchers.equalToIgnoringWhiteSpace;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertTrue;
 import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
 import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
 import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 import static 
org.springframework.test.web.servlet.setup.MockMvcBuilders.standaloneSetup;
 
+import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -111,16 +111,49 @@ public class RdfControllerTest {
                 .andExpect(status().isOk())
                 .andExpect(content().contentType(MediaType.TEXT_XML));
     }
-
+    
+    /**
+     * Make sure input variables with odd characters that could be interpreted as JSON get encoded.
+     * Literals should be represented correctly, for example: "bad" in JSON becomes \"bad\" (Java escaped \\\"bad\\\").
+     * 
+     * @throws Exception
+     */
     @Test
-    public void callback() throws Exception {
-        this.mockMvc.perform(get("/queryrdf")
-                .param("query", "")
-                .param("callback", "test"))
-                .andExpect(status().isOk())
-                
.andExpect(content().string(equalToIgnoringWhiteSpace("test()")));
+    public void callbackEncodeCorrectlyJson() throws Exception {
+        this.mockMvc.perform(get("/queryrdf") //
+                        .param("query", "select ?x where { BIND( 
'''testbad\\\\or\"bad\"''' AS ?x) }") //
+                        
.param(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH    , 
"test<bad>or{bad}or\"bad\"") //
+                        .param(RdfCloudTripleStoreConfiguration.CONF_CV        
    , "test<bad>or{bad}or\"bad\"") //
+                        .param(RdfCloudTripleStoreConfiguration.CONF_INFER     
    , "test<bad>or{bad}or\"bad\"") //
+                        
.param(RdfCloudTripleStoreConfiguration.CONF_RESULT_FORMAT, "json")) //
+                        
.andExpect(content().string(not(containsString("<bad>")))) // non-query param 
data should not be in the results
+                        
.andExpect(content().string(not(containsString("{bad}")))) // non-query param 
data should not be in the results
+                        
.andExpect(content().string(not(containsString("bad\\or")))) // should be 
bad\\\\or
+                        
.andExpect(content().string(not(containsString("\"bad\"")))); // should be 
\\\"bad\\\"
     }
 
+    /**
+     * Make sure input variables with odd characters that could be interpreted as XML get encoded.
+     * Literals should be represented correctly, for example: &lt;bad&gt; in XML becomes &amp;lt;bad&amp;gt;.
+     * 
+     * @throws Exception
+     */
+    @Test
+    public void encodeCorrectlyXml() throws Exception {
+        this.mockMvc.perform(get("/queryrdf") //
+                        .param("query", "select ?x where { BIND( 
'''test<bad>or&bador&apos;bador&quot;bad&quot;''' AS ?x ) }") //
+                    .param(RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH    
, "test<bad>or{bad}or\"bad\"") //
+                    .param(RdfCloudTripleStoreConfiguration.CONF_CV            
, "test<bad>or{bad}or\"bad\"") //
+                    .param(RdfCloudTripleStoreConfiguration.CONF_INFER         
, "test<bad>or{bad}or\"bad\"") //
+                        
.param(RdfCloudTripleStoreConfiguration.CONF_RESULT_FORMAT, 
"test<bad>or{bad}or\"bad\"")) //
+                        
.andExpect(content().string(not(containsString("<bad")))) // &lt;
+                        
.andExpect(content().string(not(containsString("bad>")))) // &gt;
+                        
.andExpect(content().string(not(containsString("{bad}"))))// non-query param 
data should not be in the results
+                        
.andExpect(content().string(not(containsString("&bad")))) // &amp;
+                        
.andExpect(content().string(not(containsString("'bad")))) // &amp;apos; 
converted from &apos;
+                        
.andExpect(content().string(not(containsString("\"bad")))); // &amp;&quot; 
converted from &quot;
+    }
+
     @Test
     public void malformedQuery() throws Exception {
         thrown.expect(NestedServletException.class);
@@ -131,7 +164,7 @@ public class RdfControllerTest {
     @Test
     public void updateQuery() throws Exception {
         this.mockMvc.perform(get("/queryrdf")
-                .param("query", "INSERT DATA { 
<http://mynamespace/ProductType1> <http://mynamespace#pred1> \"test\" }"))
+                .param("query", "INSERT \n DATA\n {\n 
<http://mynamespace/ProductType1> <http://mynamespace#pred1> \"test\" }"))
                 .andExpect(status().isOk());
 
         ValueFactory vf = repository.getValueFactory();

Reply via email to