On 16/04/12 21:44, Milorad Tosic wrote:
Andy,

I tried to recreate what you discussed in this thread related to working with 
prefixes in TDB, but it didn't work for me. I want to collect prefixes from 
default graph of a dataset as well as all other named graphs from the same 
dataset. Note that similar code works fine with non-transactional tdb-0.8.9.jar

I'm using jena-arq-2.9.0-incubating.jar, jena-core-2.7.0-incubating.jar, 
jena-iri-0.9.0-incubating.jar, and jena-tdb-0.9.0-incubating.jar.

Please would you take a look at the following code and give me a hint about 
what I am doing wrong.

Milorad

You can't directly access the prefix mapping for a dataset, especially for a live dataset.

You are creating an alternative prefix table over the top of the dataset's own one. Directly opening the storage is not recommended - it may appear to work in 0.8.X but you are also bypassing the cache so you'll probably miss some updates.

The only reason you are not corrupting the database is that it isn't being flushed to disk. It is not part of the transaction: if you call

SetupTDB.makePrefixes

it is in no way connected to the dataset, let alone the read transaction.

I suggest getting the prefixes from the default model, then iterating over the named models to get the prefixes for each. Working at the model level will get you the prefixes.

The interface to the dataset's prefixes is not formalised.

(There's probably a way to get the transaction view of the prefix mapping, but I'm on a machine that is too small to run an IDE to find out - the model route is better for the moment as it relies on formal interfaces.)
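
Something along these lines should do it - an untested sketch, using only the
public Dataset/Model API (same imports as your code below, plus java.util.HashMap):

    dataset.begin(ReadWrite.READ) ;
    try {
        Map<String, String> prefixes = new HashMap<String, String>() ;
        // Prefixes recorded for the default graph, via the default model.
        prefixes.putAll(dataset.getDefaultModel().getNsPrefixMap()) ;
        // Prefixes recorded for each named graph, via its model.
        for ( Iterator<String> names = dataset.listNames() ; names.hasNext() ; ) {
            String name = names.next() ;
            prefixes.putAll(dataset.getNamedModel(name).getNsPrefixMap()) ;
        }
        System.out.println(prefixes) ;
    } finally {
        dataset.end() ;
    }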

        Andy




=======================================================
import java.io.File;
import java.util.Iterator;
import java.util.Map;

import org.openjena.atlas.lib.FileOps;

import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.ReadWrite;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.reasoner.ReasonerRegistry;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.shared.PrefixMapping;
import com.hp.hpl.jena.sparql.core.DatasetPrefixStorage;
import com.hp.hpl.jena.tdb.TDB;
import com.hp.hpl.jena.tdb.TDBFactory;
import com.hp.hpl.jena.tdb.base.block.FileMode;
import com.hp.hpl.jena.tdb.base.file.Location;
import com.hp.hpl.jena.tdb.sys.DatasetControlNone;
import com.hp.hpl.jena.tdb.sys.SetupTDB;
import com.hp.hpl.jena.tdb.sys.SystemTDB;
import com.hp.hpl.jena.update.UpdateAction;
import com.hp.hpl.jena.update.UpdateFactory;
import com.hp.hpl.jena.update.UpdateRequest;

public class rdfStorePrefixesTesting {
     static File m_baseDir;
     static Dataset m_dataset;

     public static void prefixModel(Dataset dataset){
         if(dataset!=null){
             dataset.begin(ReadWrite.READ);

             Location datasetLocation = new Location(m_baseDir.getPath());
             System.out.println("Location of the dataset is: "+datasetLocation);
             DatasetPrefixStorage prefixes = SetupTDB.makePrefixes(datasetLocation, new DatasetControlNone()) ;
             System.out.println("Prefix mapping is: "+prefixes.getPrefixMapping());
             System.out.println("graphNames in prefixes are: "+prefixes.graphNames());
             System.out.println("graphNames in dataset are: ");
             for(Iterator<String>  i = dataset.listNames();i.hasNext();){
                 String name = i.next();
                 System.out.println(name);
                 System.out.println("1: 
"+dataset.getNamedModel(name).getNsPrefixMap());
                 System.out.println("2: 
"+dataset.getNamedModel(name).getGraph().getPrefixMapping());
             }
             for ( String gn : prefixes.graphNames() ){
                 System.out.println("Graph: "+gn) ;
                 PrefixMapping pmap = prefixes.getPrefixMapping(gn) ;

                 Map<String, String>  x = pmap.getNsPrefixMap() ;
                 for ( String k : x.keySet() )
                     System.out.println(k+":" + x.get(k)) ;
             }

             dataset.end();
         }
     }

     public static void executeUpdateSPARQL(String graphName, String querystr){
         m_dataset.begin(ReadWrite.WRITE);
         try {
             UpdateRequest updateRequest = UpdateFactory.create(querystr);

             System.out.println("executeUpdateSPARQL: "+updateRequest);
             Model m = m_dataset.getNamedModel(graphName);
             UpdateAction.execute(updateRequest, m);
             m_dataset.commit() ;
         }catch (Exception e){
             System.out.println(e);
         } finally {
             m_dataset.end();
         }
     }

     /**
      * @param args
      */
     public static void main(String[] args) {

         String baseDirPath = "C:\\Temp";
         m_baseDir = new File(baseDirPath+File.separator+"tdb");
         boolean succ = m_baseDir.exists()&&  m_baseDir.isDirectory();
         if(!succ) succ = m_baseDir.mkdir();
         if(!succ){
             m_baseDir = null;
             m_dataset = null;
             System.err.print("Error opening/creating new folder: 
"+baseDirPath);
         }else{
             FileOps.clearDirectory(m_baseDir.getPath()) ;
             m_dataset = TDBFactory.createDataset(m_baseDir.getPath()) ;
         }

         String querystr;

         querystr = "LOAD<http://geni-orca.renci.org/owl/ben-6509.rdf>";
         System.err.println("Running "+querystr);
         executeUpdateSPARQL("http://geni-orca.renci.org/owl/ben-6509.rdf";,
                 querystr);
         System.err.println("==== finished ====");

         querystr = "LOAD<http://geni-orca.renci.org/owl/nlr.rdf>";
         System.err.println("Running "+querystr);
         executeUpdateSPARQL("http://geni-orca.renci.org/owl/nlr.rdf";,
                 querystr);
         System.err.println("==== finished ====");

         prefixModel(m_dataset);

         m_dataset.close();
     }


}


=======================================================




________________________________
From: Andy Seaborne<[email protected]>
To: [email protected]
Sent: Thursday, April 12, 2012 3:24 PM
Subject: Re: PrefixMapping from TDB

On 12/04/12 14:12, Emilio Miguelanez wrote:
Hello Andy,

The prefixes.dat is zero.

For reference, I use tdbloader2 (I believe it is better than tdbloader), and it 
uses an RDF file to populate the TDB store.

I used your code and it worked quite well. I managed to retrieve the prefixes,  
and the prefixes.dat is not zero now

So I guess this piece of code is a workaround for the issue with the bulkloader.

Yes - you can non-bulk-load a data file of prefixes (no need for any
triples).
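
For example (a sketch - the file name and prefixes are only illustrative): a
Turtle file containing nothing but @prefix declarations can be read in through
the normal transactional route:

    Dataset ds = TDBFactory.createDataset("DB") ;
    ds.begin(ReadWrite.WRITE) ;
    try {
        // prefixes.ttl contains only @prefix lines, no triples.
        FileManager.get().readModel(ds.getDefaultModel(), "prefixes.ttl") ;
        ds.commit() ;
    } finally {
        ds.end() ;
    }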

     Andy


cheers,
Emilio

On 12 Apr 2012, at 11:53, Andy Seaborne wrote:

On 11/04/12 14:58, Emilio Miguelanez wrote:
Hi,

How can I get the prefixes from the TDB store using latest tdb version 0.9.0?

Previously, using version 0.8.10, I managed to get the prefixes using the 
following code

Model bModel = TDBFactory.createModel(repoDir);
OntModel aModel = ModelFactory.createOntologyModel(spec, bModel);
Map<String, String>    prefixMap = new HashMap<String, String>();
prefixMap = aModel.getNsPrefixMap();


Now, with the latest version and in order to support transactions, the code has 
changed to:

Dataset aDataset = TDBFactory.createDataset(repoDir);
Map<String, String>    prefixMap = new HashMap<String, String>();
Model model = aDataset.getDefaultModel();
prefixMap = model.getNsPrefixMap();

but it doesn't retrieve all prefixes from the model.

Should I use a different method or API to retrieve the prefixes stored in the 
tdb?


Cheers,
Emilio

Hi there,

There is a problem with the bulkloader (see JIRA JENA-175).

Could you look in the DB directory and see if the prefixes.dat is zero or not?

I tried the code below (on a disk-backed database) and it printed the 
prefixes:

     Andy


-----------------------------
package dev;

import java.util.Map ;

import org.openjena.atlas.lib.FileOps ;
import org.openjena.riot.SysRIOT ;

import com.hp.hpl.jena.query.Dataset ;
import com.hp.hpl.jena.query.DatasetFactory ;
import com.hp.hpl.jena.query.ReadWrite ;
import com.hp.hpl.jena.shared.PrefixMapping ;
import com.hp.hpl.jena.sparql.core.DatasetGraph ;
import com.hp.hpl.jena.sparql.core.DatasetPrefixStorage ;
import com.hp.hpl.jena.tdb.TDB ;
import com.hp.hpl.jena.tdb.TDBFactory ;
import com.hp.hpl.jena.tdb.store.DatasetGraphTDB ;
import com.hp.hpl.jena.tdb.sys.DatasetControlNone ;
import com.hp.hpl.jena.tdb.sys.SetupTDB ;
import com.hp.hpl.jena.tdb.transaction.DatasetGraphTransaction ;
import com.hp.hpl.jena.util.FileManager ;

public class Jena_TDB_prefixes
{
public static void main(String ... argv)
{
      FileOps.clearDirectory("DB") ;
      SysRIOT.wireIntoJena() ;
      DatasetGraphTransaction dsg = (DatasetGraphTransaction)TDBFactory.createDatasetGraph("DB") ;
      Dataset ds = DatasetFactory.create(dsg) ;
      ds.begin(ReadWrite.WRITE) ;
      FileManager.get().readModel(ds.getDefaultModel(), "D.ttl") ;
      ds.commit() ;
      ds.end() ;

      System.out.println("Prefixes (DS):") ;

      DatasetPrefixStorage prefixes = SetupTDB.makePrefixes(dsg.getLocation(), new DatasetControlNone()) ;
      for ( String gn : prefixes.graphNames() )
      {
          System.out.println("Graph: "+gn) ;
          PrefixMapping pmap = prefixes.getPrefixMapping(gn) ;
          Map<String, String>   x = pmap.getNsPrefixMap() ;
          for ( String k : x.keySet() )
              System.out.printf("  %-10s %s\n", k+":", x.get(k)) ;
      }


      System.out.println("Prefixes (dft model):") ;
      ds.begin(ReadWrite.READ) ;
      Map<String, String>   mapping = ds.getDefaultModel().getNsPrefixMap() ;
      System.out.println(mapping) ;

      ds.end() ;
}
}

-----------------------------
---- D.ttl
@prefix dc: <http://purl.org/dc/elements/1.1/> .
@prefix ns: <http://example.org/ns#> .

@prefix : <http://example.org/book/> .

:s :p :o .






