Author: andy
Date: Tue Nov 12 14:19:37 2013
New Revision: 1541072

URL: http://svn.apache.org/r1541072
Log:
Default unknown formats to NQuads or NTriples, depending on whether loading a graph or a dataset.
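With this revision a file whose extension RIOT does not recognise is no longer an error in the bulk loader: it falls back to N-Triples when loading into a single graph and to N-Quads when loading into a whole dataset, via the two-argument RDFLanguages.filenameToLang(filename, default) call used in the diff below. A minimal sketch of that selection logic follows; the class and method names here are illustrative only and are not part of the patch.

    import org.apache.jena.riot.Lang ;
    import org.apache.jena.riot.RDFLanguages ;

    public class FormatDefaultSketch {
        // Choose a parser language for a file, falling back to the
        // per-destination default when the extension is not recognised.
        static Lang chooseLang(String filename, boolean loadingDataset) {
            Lang fallback = loadingDataset ? Lang.NQUADS : Lang.NTRIPLES ;
            // filenameToLang returns the second argument when the file
            // extension does not map to a registered RDF syntax.
            return RDFLanguages.filenameToLang(filename, fallback) ;
        }

        public static void main(String[] args) {
            System.out.println(chooseLang("data.ttl", false)) ;     // Turtle (recognised)
            System.out.println(chooseLang("dump.unknown", true)) ;  // N-Quads (fallback)
        }
    }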
Modified: jena/trunk/jena-tdb/src/main/java/com/hp/hpl/jena/tdb/store/bulkloader/BulkLoader.java jena/trunk/jena-tdb/src/main/java/tdb/tdbloader.java Modified: jena/trunk/jena-tdb/src/main/java/com/hp/hpl/jena/tdb/store/bulkloader/BulkLoader.java URL: http://svn.apache.org/viewvc/jena/trunk/jena-tdb/src/main/java/com/hp/hpl/jena/tdb/store/bulkloader/BulkLoader.java?rev=1541072&r1=1541071&r2=1541072&view=diff ============================================================================== --- jena/trunk/jena-tdb/src/main/java/com/hp/hpl/jena/tdb/store/bulkloader/BulkLoader.java (original) +++ jena/trunk/jena-tdb/src/main/java/com/hp/hpl/jena/tdb/store/bulkloader/BulkLoader.java Tue Nov 12 14:19:37 2013 @@ -16,15 +16,16 @@ * limitations under the License. */ -package com.hp.hpl.jena.tdb.store.bulkloader; +package com.hp.hpl.jena.tdb.store.bulkloader ; import java.io.InputStream ; import java.util.List ; import org.apache.jena.atlas.event.EventType ; import org.apache.jena.atlas.lib.Tuple ; +import org.apache.jena.riot.Lang ; +import org.apache.jena.riot.RDFDataMgr ; import org.apache.jena.riot.RDFLanguages ; -import org.apache.jena.riot.RiotReader ; import org.slf4j.Logger ; import com.hp.hpl.jena.graph.Node ; @@ -42,281 +43,260 @@ import com.hp.hpl.jena.tdb.store.Dataset import com.hp.hpl.jena.tdb.sys.Names ; /** Overall framework for bulk loading */ -public class BulkLoader -{ +public class BulkLoader { // Coordinate the NodeTupleTable loading. /** Tick point for messages during loading of data */ - public static int DataTickPoint = 50*1000 ; + public static int DataTickPoint = 50 * 1000 ; /** Tick point for messages during secondary index creation */ - public static long IndexTickPoint = 100*1000 ; - + public static long IndexTickPoint = 100 * 1000 ; + /** Number of ticks per super tick */ - public static int superTick = 10 ; - + public static int superTick = 10 ; + // Events. - //private static String baseNameGeneral = "http://openjena.org/TDB/event#" ; + // private static String baseNameGeneral = "http://openjena.org/TDB/event#" + // ; + + private static String baseName = "http://openjena.org/TDB/bulkload/event#" ; + + public static EventType evStartBulkload = new EventType(baseName + "start-bulkload") ; + public static EventType evFinishBulkload = new EventType(baseName + "finish-bulkload") ; + + public static EventType evStartDataBulkload = new EventType(baseName + "start-bulkload-data") ; + public static EventType evFinishDataBulkload = new EventType(baseName + "finish-bulkload-data") ; + + public static EventType evStartIndexBulkload = new EventType(baseName + "start-bulkload-index") ; + public static EventType evFinishIndexBulkload = new EventType(baseName + "finish-bulkload-index") ; + + static private Logger loadLogger = TDB.logLoader ; - private static String baseName = "http://openjena.org/TDB/bulkload/event#" ; - - - public static EventType evStartBulkload = new EventType(baseName+"start-bulkload") ; - public static EventType evFinishBulkload = new EventType(baseName+"finish-bulkload") ; - - public static EventType evStartDataBulkload = new EventType(baseName+"start-bulkload-data") ; - public static EventType evFinishDataBulkload = new EventType(baseName+"finish-bulkload-data") ; - - public static EventType evStartIndexBulkload = new EventType(baseName+"start-bulkload-index") ; - public static EventType evFinishIndexBulkload = new EventType(baseName+"finish-bulkload-index") ; - - - static private Logger loadLogger = TDB.logLoader ; - // Event callbacks for the load stages? 
- // On what object? The dataset. + // On what object? The dataset. + + // /** Load into default graph */ + // public static void loadTriples(DatasetGraphTDB dsg, String url, boolean + // showProgress) + // { + // loadTriples(dsg, asList(url) , showProgress) ; + // } -// /** Load into default graph */ -// public static void loadTriples(DatasetGraphTDB dsg, String url, boolean showProgress) -// { -// loadTriples(dsg, asList(url) , showProgress) ; -// } - /** Load into default graph */ - public static void loadDefaultGraph(DatasetGraphTDB dsg, List<String> urls, boolean showProgress) - { + public static void loadDefaultGraph(DatasetGraphTDB dsg, List<String> urls, boolean showProgress) { BulkStreamRDF dest = destinationDefaultGraph(dsg, showProgress) ; loadTriples$(dest, urls) ; } /** Load into default graph */ - public static void loadDefaultGraph(DatasetGraphTDB dsg, InputStream input, boolean showProgress) - { + public static void loadDefaultGraph(DatasetGraphTDB dsg, InputStream input, boolean showProgress) { BulkStreamRDF dest = destinationDefaultGraph(dsg, showProgress) ; loadTriples$(dest, input) ; } - private static BulkStreamRDF destinationDefaultGraph(DatasetGraphTDB dsg, boolean showProgress) - { + private static BulkStreamRDF destinationDefaultGraph(DatasetGraphTDB dsg, boolean showProgress) { return destinationGraph(dsg, null, showProgress) ; } /** Load into named graph */ - public static void loadNamedGraph(DatasetGraphTDB dsg, Node graphNode, List<String> urls, boolean showProgress) - { + public static void loadNamedGraph(DatasetGraphTDB dsg, Node graphNode, List<String> urls, boolean showProgress) { BulkStreamRDF dest = destinationNamedGraph(dsg, graphNode, showProgress) ; loadTriples$(dest, urls) ; } - + /** Load into named graph */ - public static void loadNamedGraph(DatasetGraphTDB dsg, Node graphNode, InputStream input, boolean showProgress) - { + public static void loadNamedGraph(DatasetGraphTDB dsg, Node graphNode, InputStream input, boolean showProgress) { BulkStreamRDF dest = destinationNamedGraph(dsg, graphNode, showProgress) ; loadTriples$(dest, input) ; } /** Load into a dataset */ - public static void loadDataset(DatasetGraphTDB dsg, List<String> urls, boolean showProgress) - { + public static void loadDataset(DatasetGraphTDB dsg, List<String> urls, boolean showProgress) { BulkStreamRDF dest = destinationDataset(dsg, showProgress) ; loadQuads$(dest, urls) ; } - + /** Load into a dataset */ - public static void loadDataset(DatasetGraphTDB dsg, InputStream input, boolean showProgress) - { + public static void loadDataset(DatasetGraphTDB dsg, InputStream input, boolean showProgress) { BulkStreamRDF dest = destinationDataset(dsg, showProgress) ; loadQuads$(dest, input) ; } /** Load into a graph */ - private static void loadTriples$(BulkStreamRDF dest, List<String> urls) - { + private static void loadTriples$(BulkStreamRDF dest, List<String> urls) { dest.startBulk() ; - for ( String url : urls ) - { - loadLogger.info("Load: "+url+" -- "+Utils.nowAsString()) ; - RiotReader.parse(url, dest) ; - } + for ( String url : urls ) { + loadLogger.info("Load: " + url + " -- " + Utils.nowAsString()) ; + Lang lang = RDFLanguages.filenameToLang(url, Lang.NTRIPLES) ; + RDFDataMgr.parse(dest, url, lang) ; + } dest.finishBulk() ; } /** Load into a graph */ - private static void loadTriples$(BulkStreamRDF dest, InputStream input) - { - loadLogger.info("Load: from input stream -- "+Utils.nowAsString()) ; + private static void loadTriples$(BulkStreamRDF dest, InputStream input) { + 
loadLogger.info("Load: from input stream -- " + Utils.nowAsString()) ; dest.startBulk() ; - RiotReader.parse(input, RDFLanguages.NTRIPLES, null, dest) ; + RDFDataMgr.parse(dest, input, Lang.NTRIPLES) ; dest.finishBulk() ; } - + /** Load quads into a dataset */ - private static void loadQuads$(BulkStreamRDF dest, List<String> urls) - { + private static void loadQuads$(BulkStreamRDF dest, List<String> urls) { dest.startBulk() ; - for ( String url : urls ) - { - loadLogger.info("Load: "+url+" -- "+Utils.nowAsString()) ; - RiotReader.parse(url, dest) ; + for ( String url : urls ) { + loadLogger.info("Load: " + url + " -- " + Utils.nowAsString()) ; + Lang lang = RDFLanguages.filenameToLang(url, Lang.NQUADS) ; + RDFDataMgr.parse(dest, url, lang) ; } dest.finishBulk() ; } /** Load quads into a dataset */ - private static void loadQuads$(BulkStreamRDF dest, InputStream input) - { - loadLogger.info("Load: from input stream -- "+Utils.nowAsString()) ; + private static void loadQuads$(BulkStreamRDF dest, InputStream input) { + loadLogger.info("Load: from input stream -- " + Utils.nowAsString()) ; dest.startBulk() ; - RiotReader.parse(input, RDFLanguages.NQUADS, null, dest) ; + RDFDataMgr.parse(dest, input, RDFLanguages.NQUADS) ; dest.finishBulk() ; } - - private static BulkStreamRDF destinationNamedGraph(DatasetGraphTDB dsg, Node graphName, boolean showProgress) - { + + private static BulkStreamRDF destinationNamedGraph(DatasetGraphTDB dsg, Node graphName, boolean showProgress) { if ( graphName == null ) - return destinationDefaultGraph(dsg,showProgress) ; + return destinationDefaultGraph(dsg, showProgress) ; return destinationGraph(dsg, graphName, showProgress) ; } - public static LoadMonitor createLoadMonitor(DatasetGraphTDB dsg, String itemName, boolean showProgress) - { - if ( showProgress ) + public static LoadMonitor createLoadMonitor(DatasetGraphTDB dsg, String itemName, boolean showProgress) { + if ( showProgress ) return new LoadMonitor(dsg, loadLogger, itemName, DataTickPoint, IndexTickPoint) ; else - return new LoadMonitor(dsg, null, itemName, DataTickPoint, IndexTickPoint) ; + return new LoadMonitor(dsg, null, itemName, DataTickPoint, IndexTickPoint) ; } - private static BulkStreamRDF destinationDataset(DatasetGraphTDB dsg, boolean showProgress) - { + private static BulkStreamRDF destinationDataset(DatasetGraphTDB dsg, boolean showProgress) { return new DestinationDSG(dsg, showProgress) ; } - - private static BulkStreamRDF destinationGraph(DatasetGraphTDB dsg, Node graphNode, boolean showProgress) - { + + private static BulkStreamRDF destinationGraph(DatasetGraphTDB dsg, Node graphNode, boolean showProgress) { return new DestinationGraph(dsg, graphNode, showProgress) ; } // Load triples and quads into a dataset. 
- private static final class DestinationDSG implements BulkStreamRDF - { - final private DatasetGraphTDB dsg ; - final private boolean startedEmpty ; - final private LoadMonitor monitor1 ; - final private LoadMonitor monitor2 ; + private static final class DestinationDSG implements BulkStreamRDF { + final private DatasetGraphTDB dsg ; + final private boolean startedEmpty ; + final private LoadMonitor monitor1 ; + final private LoadMonitor monitor2 ; final private LoaderNodeTupleTable loaderTriples ; final private LoaderNodeTupleTable loaderQuads ; - final private boolean showProgress ; - private long count = 0 ; - private StatsCollector stats ; - - DestinationDSG(final DatasetGraphTDB dsg, boolean showProgress) - { + final private boolean showProgress ; + private long count = 0 ; + private StatsCollector stats ; + + DestinationDSG(final DatasetGraphTDB dsg, boolean showProgress) { this.dsg = dsg ; startedEmpty = dsg.isEmpty() ; monitor1 = createLoadMonitor(dsg, "triples", showProgress) ; monitor2 = createLoadMonitor(dsg, "quads", showProgress) ; - + loaderTriples = new LoaderNodeTupleTable(dsg.getTripleTable().getNodeTupleTable(), "triples", monitor1) ; loaderQuads = new LoaderNodeTupleTable(dsg.getQuadTable().getNodeTupleTable(), "quads", monitor2) ; this.showProgress = showProgress ; } - + @Override - final public void startBulk() - { + final public void startBulk() { loaderTriples.loadStart() ; loaderQuads.loadStart() ; - + loaderTriples.loadDataStart() ; loaderQuads.loadDataStart() ; this.stats = new StatsCollector() ; } - + @Override - public void triple(Triple triple) - { + public void triple(Triple triple) { Node s = triple.getSubject() ; Node p = triple.getPredicate() ; Node o = triple.getObject() ; - process(Quad.tripleInQuad, s, p, o ) ; + process(Quad.tripleInQuad, s, p, o) ; } - + @Override - public void quad(Quad quad) - { + public void quad(Quad quad) { Node s = quad.getSubject() ; Node p = quad.getPredicate() ; Node o = quad.getObject() ; Node g = null ; // Union graph?! - if ( ! quad.isTriple() && ! quad.isDefaultGraph() ) + if ( !quad.isTriple() && !quad.isDefaultGraph() ) g = quad.getGraph() ; - process(g,s,p,o) ; + process(g, s, p, o) ; } - - private void process(Node g, Node s, Node p, Node o) - { - if ( g == null ) + + private void process(Node g, Node s, Node p, Node o) { + if ( g == null ) loaderTriples.load(s, p, o) ; else loaderQuads.load(g, s, p, o) ; count++ ; - stats.record(g, s, p, o) ; + stats.record(g, s, p, o) ; } - + @Override - public void finishBulk() - { + public void finishBulk() { loaderTriples.loadDataFinish() ; loaderQuads.loadDataFinish() ; - + loaderTriples.loadIndexStart() ; loaderQuads.loadIndexStart() ; - + loaderTriples.loadIndexFinish() ; loaderQuads.loadIndexFinish() ; - + loaderTriples.loadFinish() ; loaderQuads.loadFinish() ; - if ( ! 
dsg.getLocation().isMem() && startedEmpty ) - { + if ( !dsg.getLocation().isMem() && startedEmpty ) { String filename = dsg.getLocation().getPath(Names.optStats) ; Stats.write(filename, stats.results()) ; } forceSync(dsg) ; } - + @Override - public void start() {} + public void start() {} + @Override - public void tuple(Tuple<Node> tuple) { throw new TDBException("Tuple encountered while loading a dataset") ; } + public void tuple(Tuple<Node> tuple) { + throw new TDBException("Tuple encountered while loading a dataset") ; + } + @Override - public void base(String base) {} - + public void base(String base) {} + @Override - public void prefix(String prefix, String iri) - { dsg.getPrefixes().getPrefixMapping().setNsPrefix(prefix, iri) ; } + public void prefix(String prefix, String iri) { + dsg.getPrefixes().getPrefixMapping().setNsPrefix(prefix, iri) ; + } @Override - public void finish() {} + public void finish() {} } // Load triples into a specific NodeTupleTable - private static final class DestinationGraph implements BulkStreamRDF - { - final private DatasetGraphTDB dsg ; - final private Node graphName ; - final private LoadMonitor monitor ; + private static final class DestinationGraph implements BulkStreamRDF { + final private DatasetGraphTDB dsg ; + final private Node graphName ; + final private LoadMonitor monitor ; final private LoaderNodeTupleTable loaderTriples ; - final private boolean startedEmpty ; - private long count = 0 ; - private StatsCollector stats ; + final private boolean startedEmpty ; + private long count = 0 ; + private StatsCollector stats ; // Graph node is null for default graph. - DestinationGraph(final DatasetGraphTDB dsg, Node graphNode, boolean showProgress) - { + DestinationGraph(final DatasetGraphTDB dsg, Node graphNode, boolean showProgress) { this.dsg = dsg ; this.graphName = graphNode ; - + // Choose NodeTupleTable. NodeTupleTable nodeTupleTable ; if ( graphNode == null || Quad.isDefaultGraph(graphNode) ) @@ -331,35 +311,32 @@ public class BulkLoader } @Override - final public void startBulk() - { + final public void startBulk() { loaderTriples.loadStart() ; loaderTriples.loadDataStart() ; this.stats = new StatsCollector() ; } + @Override - final public void triple(Triple triple) - { + final public void triple(Triple triple) { Node s = triple.getSubject() ; Node p = triple.getPredicate() ; Node o = triple.getObject() ; - loaderTriples.load(s, p, o) ; - stats.record(null, s, p, o) ; + loaderTriples.load(s, p, o) ; + stats.record(null, s, p, o) ; count++ ; } @Override - final public void finishBulk() - { + final public void finishBulk() { loaderTriples.loadDataFinish() ; loaderTriples.loadIndexStart() ; loaderTriples.loadIndexFinish() ; loaderTriples.loadFinish() ; - if ( ! 
dsg.getLocation().isMem() && startedEmpty ) - { + if ( !dsg.getLocation().isMem() && startedEmpty ) { String filename = dsg.getLocation().getPath(Names.optStats) ; Stats.write(filename, stats.results()) ; } @@ -367,41 +344,47 @@ public class BulkLoader } @Override - public void start() {} + public void start() {} + @Override - public void quad(Quad quad) { throw new TDBException("Quad encountered while loading a single graph") ; } + public void quad(Quad quad) { + throw new TDBException("Quad encountered while loading a single graph") ; + } + @Override - public void tuple(Tuple<Node> tuple) { throw new TDBException("Tuple encountered while loading a single graph") ; } + public void tuple(Tuple<Node> tuple) { + throw new TDBException("Tuple encountered while loading a single graph") ; + } + @Override - public void base(String base) { } + public void base(String base) {} + @Override - public void prefix(String prefix, String iri) - { - if ( graphName != null && graphName.isBlank() ) { + public void prefix(String prefix, String iri) { + if ( graphName != null && graphName.isBlank() ) { loadLogger.warn("Prefixes for blank node graphs not stored") ; return ; } - - PrefixMapping pmap = - ( graphName == null ) - ? dsg.getPrefixes().getPrefixMapping() - : dsg.getPrefixes().getPrefixMapping(graphName.getURI()) ; + + PrefixMapping pmap = (graphName == null) + ? dsg.getPrefixes().getPrefixMapping() + : dsg.getPrefixes().getPrefixMapping(graphName.getURI()) ; pmap.setNsPrefix(prefix, iri) ; } + @Override - public void finish() {} + public void finish() {} } - static void forceSync(DatasetGraphTDB dsg) - { + static void forceSync(DatasetGraphTDB dsg) { // Force sync - we have been bypassing DSG tables. // THIS DOES NOT WORK IF modules check for SYNC necessity. - dsg.getTripleTable().getNodeTupleTable().getNodeTable().sync(); - dsg.getQuadTable().getNodeTupleTable().getNodeTable().sync(); - dsg.getQuadTable().getNodeTupleTable().getNodeTable().sync(); - dsg.getPrefixes().getNodeTupleTable().getNodeTable().sync(); + dsg.getTripleTable().getNodeTupleTable().getNodeTable().sync() ; + dsg.getQuadTable().getNodeTupleTable().getNodeTable().sync() ; + dsg.getQuadTable().getNodeTupleTable().getNodeTable().sync() ; + dsg.getPrefixes().getNodeTupleTable().getNodeTable().sync() ; // This is not enough -- modules check whether sync needed. dsg.sync() ; - + } } Modified: jena/trunk/jena-tdb/src/main/java/tdb/tdbloader.java URL: http://svn.apache.org/viewvc/jena/trunk/jena-tdb/src/main/java/tdb/tdbloader.java?rev=1541072&r1=1541071&r2=1541072&view=diff ============================================================================== --- jena/trunk/jena-tdb/src/main/java/tdb/tdbloader.java (original) +++ jena/trunk/jena-tdb/src/main/java/tdb/tdbloader.java Tue Nov 12 14:19:37 2013 @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package tdb; +package tdb ; import java.util.List ; @@ -31,136 +31,121 @@ import com.hp.hpl.jena.tdb.TDB ; import com.hp.hpl.jena.tdb.TDBLoader ; import com.hp.hpl.jena.tdb.store.GraphTDB ; -public class tdbloader extends CmdTDBGraph -{ -// private static final ArgDecl argGraphDeafult = new ArgDecl(ArgDecl.NoValue, "default") ; - -// private static final ArgDecl argParallel = new ArgDecl(ArgDecl.NoValue, "parallel") ; -// private static final ArgDecl argIncremental = new ArgDecl(ArgDecl.NoValue, "incr", "incremental") ; - - private static final ModModel modRDFS = new ModModel("rdfs") ; - -// private String rdfsVocabFilename = null ; -// private Model rdfsVocab = null ; - - private boolean showProgress = true ; -// private boolean doInParallel = false ; - private boolean doIncremental = false ; - - static public void main(String... argv) - { +public class tdbloader extends CmdTDBGraph { + // private static final ArgDecl argParallel = new ArgDecl(ArgDecl.NoValue, + // "parallel") ; + // private static final ArgDecl argIncremental = new + // ArgDecl(ArgDecl.NoValue, "incr", "incremental") ; + + private static final ModModel modRDFS = new ModModel("rdfs") ; + + // private String rdfsVocabFilename = null ; + // private Model rdfsVocab = null ; + + private boolean showProgress = true ; + // private boolean doInParallel = false ; + private boolean doIncremental = false ; + + static public void main(String... argv) { CmdTDB.init() ; TDB.setOptimizerWarningFlag(false) ; new tdbloader(argv).mainRun() ; } - protected tdbloader(String[] argv) - { + protected tdbloader(String[] argv) { super(argv) ; - -// super.add(argParallel, "--parallel", "Do rebuilding of secondary indexes in a parallel") ; -// super.add(argIncremental, "--incremental", "Do an incremental load (keep indexes during data load)") ; -// super.add(argStats, "--stats", "Generate statistics while loading (new graph only)") ; -// addModule(modRDFS) ; - } + // super.add(argParallel, "--parallel", + // "Do rebuilding of secondary indexes in a parallel") ; + // super.add(argIncremental, "--incremental", + // "Do an incremental load (keep indexes during data load)") ; + // super.add(argStats, "--stats", + // "Generate statistics while loading (new graph only)") ; + // addModule(modRDFS) ; + } @Override - protected void processModulesAndArgs() - { + protected void processModulesAndArgs() { super.processModulesAndArgs() ; -// doInParallel = super.contains(argParallel) ; -// doIncremental = super.contains(argIncremental) ; + // doInParallel = super.contains(argParallel) ; + // doIncremental = super.contains(argIncremental) ; } - + @Override - protected String getSummary() - { - return getCommandName()+" [--desc DATASET | -loc DIR] FILE ..." ; + protected String getSummary() { + return getCommandName() + " [--desc DATASET | -loc DIR] FILE ..." 
; } @Override - protected void exec() - { - if ( isVerbose()) - { - System.out.println("Java maximum memory: "+Runtime.getRuntime().maxMemory()); + protected void exec() { + if ( isVerbose() ) { + System.out.println("Java maximum memory: " + Runtime.getRuntime().maxMemory()) ; System.out.println(ARQ.getContext()) ; } if ( isVerbose() ) showProgress = true ; if ( isQuiet() ) showProgress = false ; - + List<String> urls = getPositional() ; if ( urls.size() == 0 ) urls.add("-") ; - - if ( modRDFS.getModel() != null ) - { + + if ( modRDFS.getModel() != null ) { // TODO } - + boolean allTriples = true ; - for ( String url : urls ) - { + for ( String url : urls ) { Lang lang = RDFLanguages.filenameToLang(url, RDFLanguages.NQUADS) ; - if ( lang != null && RDFLanguages.isQuads(lang) ) - { + if ( lang != null && RDFLanguages.isQuads(lang) ) { allTriples = false ; - break ; + break ; } } - - if ( allTriples && graphName == null ) - { + + if ( allTriples && graphName == null ) { loadDefaultGraph(urls) ; return ; } - - if ( graphName == null ) - { + + if ( graphName == null ) { loadQuads(urls) ; - return ; + return ; } // graphName != null - if ( ! allTriples ) - { - for ( String url : urls ) - { + if ( !allTriples ) { + for ( String url : urls ) { Lang lang = RDFLanguages.filenameToLang(url, RDFLanguages.NQUADS) ; if ( lang == null ) // Does not happen due to default above. - cmdError("File suffix not recognized: " +url) ; - if ( lang != null && ! RDFLanguages.isTriples(lang) ) - cmdError("Can only load triples into a named model: "+url) ; + cmdError("File suffix not recognized: " + url) ; + if ( lang != null && !RDFLanguages.isTriples(lang) ) + cmdError("Can only load triples into a named model: " + url) ; } cmdError("Internal error: deteched quad input but can't find it again") ; return ; } - + loadNamedGraph(urls) ; } - + // RDFS - - void loadDefaultGraph(List<String> urls) - { + + void loadDefaultGraph(List<String> urls) { GraphTDB graph = getGraph() ; TDBLoader.load(graph, urls, showProgress) ; return ; } - - void loadNamedGraph(List<String> urls) - { + + void loadNamedGraph(List<String> urls) { GraphTDB graph = getGraph() ; TDBLoader.load(graph, urls, showProgress) ; return ; } - void loadQuads(List<String> urls) - { + void loadQuads(List<String> urls) { TDBLoader.load(getDatasetGraphTDB(), urls, showProgress) ; return ; }
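Taken together, the revised exec() routes input through three paths: default-graph load, named-graph load, and full dataset (quad) load, sniffing each file with filenameToLang and an N-Quads default. The standalone sketch below restates that routing decision outside the command framework; the helper class is hypothetical and mirrors, rather than replaces, the logic above.

    import java.util.Arrays ;
    import java.util.List ;

    import org.apache.jena.riot.Lang ;
    import org.apache.jena.riot.RDFLanguages ;

    public class LoaderRoutingSketch {
        enum Route { DEFAULT_GRAPH, NAMED_GRAPH, DATASET }

        // Decide which load path tdbloader would take for these inputs.
        static Route route(List<String> urls, String graphName) {
            boolean allTriples = true ;
            for ( String url : urls ) {
                // Unknown extensions default to N-Quads, as in exec() above.
                Lang lang = RDFLanguages.filenameToLang(url, RDFLanguages.NQUADS) ;
                if ( lang != null && RDFLanguages.isQuads(lang) )
                    allTriples = false ;
            }
            if ( graphName == null )
                return allTriples ? Route.DEFAULT_GRAPH : Route.DATASET ;
            if ( !allTriples )
                throw new IllegalArgumentException("Can only load triples into a named model") ;
            return Route.NAMED_GRAPH ;
        }

        public static void main(String[] args) {
            System.out.println(route(Arrays.asList("data.nt"), null)) ;                // DEFAULT_GRAPH
            System.out.println(route(Arrays.asList("dump.unknown"), null)) ;           // DATASET
            System.out.println(route(Arrays.asList("data.ttl"), "http://example/g")) ; // NAMED_GRAPH
        }
    }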