Well...

Here is the query

PREFIX : <http://purl.bdrc.io/ontology/core/>  
PREFIX adm: <http://purl.bdrc.io/ontology/admin/>  
PREFIX bdr: <http://purl.bdrc.io/resource/>  
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> 
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> 
PREFIX skos: <http://www.w3.org/2004/02/skos/core#>  
PREFIX tbr: <http://purl.bdrc.io/ontology/toberemoved/> 
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> 
PREFIX f: <java:io.bdrc.ldsearch.query.functions.CustomARQFunctions.>

SELECT DISTINCT ?l 
WHERE { 
?x skos:prefLabel ?l .  
FILTER (f:myFilter(?l) < 100) 
}

Note that when I change FILTER (f:myFilter(?l) < 100) to FILTER
(STRLEN(?l) < 100), I don't get the 404 exception...
Therefore it's not a connection issue. I think it's more like an
unresolved URI or something similar.

Now, since you're asking for it, here is the full fuseki config:

################################################################
# Fuseki configuration for BDRC, configures two endpoints:
#   - /bdrc is read-only
#   - /bdrcrw is read-write
#
# This was painful to come up with but the web interface basically allows no option
# and there is no subclass inference by default so such a configuration file is necessary.
#
# The main doc sources are:
#  - https://jena.apache.org/documentation/fuseki2/fuseki-configuration.html
#  - https://jena.apache.org/documentation/assembler/assembler-howto.html
#  - https://jena.apache.org/documentation/assembler/assembler.ttl
#
# See https://jena.apache.org/documentation/fuseki2/fuseki-layout.html
# for the destination of this file.

@prefix fuseki:  <http://jena.apache.org/fuseki#> .
@prefix rdf:     <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs:    <http://www.w3.org/2000/01/rdf-schema#> .
@prefix tdb:     <http://jena.hpl.hp.com/2008/tdb#> .
# @prefix tdb2:    <http://jena.apache.org/2016/tdb#> .
@prefix ja:      <http://jena.hpl.hp.com/2005/11/Assembler#> .
@prefix :        <http://base/#> .
@prefix text:    <http://jena.apache.org/text#> .
@prefix skos:    <http://www.w3.org/2004/02/skos/core#> .
@prefix adm:     <http://purl.bdrc.io/ontology/admin/> .
@prefix bdd:     <http://purl.bdrc.io/data/> .
@prefix bdo:     <http://purl.bdrc.io/ontology/core/> .
@prefix bdr:     <http://purl.bdrc.io/resource/> .
@prefix f:       <java:io.bdrc.ldsearch.query.functions.CustomARQFunctions.> .

ja:loadClass "io.bdrc.ldsearch.query.functions.CustomARQFunctions" .
# [] ja:loadClass "org.seaborne.tdb2.TDB2" .
# tdb2:DatasetTDB2  rdfs:subClassOf  ja:RDFDataset .
# tdb2:GraphTDB2    rdfs:subClassOf  ja:Model .

[] rdf:type fuseki:Server ;
   fuseki:services (
     :bdrcrw
#      :bdrcro
   ) .

:bdrcrw rdf:type fuseki:Service ;
    fuseki:name                       "bdrcrw" ;   # name of the dataset in the url
    fuseki:serviceQuery               "query" ;    # SPARQL query service
    fuseki:serviceUpdate              "update" ;   # SPARQL update service
    fuseki:serviceUpload              "upload" ;   # Non-SPARQL upload service
    fuseki:serviceReadWriteGraphStore "data" ;     # SPARQL Graph store protocol (read and write)
    fuseki:dataset                    :bdrc_text_dataset ;
    .

# :bdrcro rdf:type fuseki:Service ;
#     fuseki:name                     "bdrc" ;
#     fuseki:serviceQuery             "query" ;
#     fuseki:serviceReadGraphStore    "data" ;
#     fuseki:dataset                    :bdrc_text_dataset ;
#     .

# using TDB
:dataset_bdrc rdf:type      tdb:DatasetTDB ;
     tdb:location "/etc/fuseki/databases/bdrc" ;
     tdb:unionDefaultGraph true ;
     .

# # try using TDB2
# :dataset_bdrc rdf:type      tdb2:DatasetTDB2 ;
#      tdb2:location "/etc/fuseki/databases/bdrc" ;
#      tdb2:unionDefaultGraph true ;
#   .

:bdrc_text_dataset rdf:type     text:TextDataset ;
    text:dataset   :dataset_bdrc ;
    text:index     :bdrc_lucene_index ;
    .

# Text index description
:bdrc_lucene_index a text:TextIndexLucene ;
    text:directory <file:/etc/fuseki/lucene-bdrc> ;
    text:storeValues true ;
    text:multilingualSupport true ;
    text:entityMap :bdrc_entmap ;
    text:defineAnalyzers (
        [ text:addLang "bo" ; 
          text:analyzer [ 
            a text:GenericAnalyzer ;
            text:class "io.bdrc.lucene.bo.TibetanAnalyzer" ;
            text:params (
                [ text:paramName "segmentInWords" ;
                  text:paramType text:TypeBoolean ; 
                  text:paramValue false ]
                [ text:paramName "lemmatize" ;
                  text:paramType text:TypeBoolean ;
                  text:paramValue true ]
                [ text:paramName "filterChars" ;
                  text:paramType text:TypeBoolean ;
                  text:paramValue false ]
                [ text:paramName "fromEwts" ;
                  text:paramType text:TypeBoolean ;
                  text:paramValue false ]
                )
            ] ; 
          ]
        [ text:addLang "bo-x-ewts" ; 
          text:analyzer [ 
            a text:GenericAnalyzer ;
            text:class "io.bdrc.lucene.bo.TibetanAnalyzer" ;
            text:params (
                [ text:paramName "segmentInWords" ;
                  text:paramType text:TypeBoolean ; 
                  text:paramValue false ]
                [ text:paramName "lemmatize" ;
                  text:paramType text:TypeBoolean ;
                  text:paramValue true ]
                [ text:paramName "filterChars" ;
                  text:paramType text:TypeBoolean ;
                  text:paramValue false ]
                [ text:paramName "fromEwts" ;
                  text:paramType text:TypeBoolean ;
                  text:paramValue true ]
                )
            ] ; 
          ]
      ) ;
    .

# Index mappings
:bdrc_entmap a text:EntityMap ;
    text:entityField      "uri" ;
    text:uidField         "uid" ;
    text:defaultField     "label" ;
    text:langField        "lang" ;
    text:graphField       "graph" ; ## enable graph-specific indexing
    text:map (
         [ text:field "label" ; 
           text:predicate skos:prefLabel ]
         [ text:field "altLabel" ; 
           text:predicate skos:altLabel ; ]
         [ text:field "rdfsLabel" ;
           text:predicate rdfs:label ; ]
         [ text:field "chunkContents" ;
           text:predicate bdo:chunkContents ; ]
         [ text:field "eTextTitle" ;
           text:predicate bdo:eTextTitle ; ]
         [ text:field "logMessage" ;
           text:predicate adm:logMessage ; ]
         [ text:field "noteText" ;
           text:predicate bdo:noteText ; ]
         [ text:field "workAuthorshipStatement" ;
           text:predicate bdo:workAuthorshipStatement ; ]
         [ text:field "workColophon" ; 
           text:predicate bdo:workColophon ; ]
         [ text:field "workEditionStatement" ;
           text:predicate bdo:workEditionStatement ; ]
         [ text:field "workPublisherLocation" ;
           text:predicate bdo:workPublisherLocation ; ]
         [ text:field "workPublisherName" ;
           text:predicate bdo:workPublisherName ; ]
         [ text:field "workSeriesName" ;
           text:predicate bdo:workSeriesName ; ]
         ) ;
    .
###################################################################

It would be wonderful to have the java:URI scheme thing working (all
custom functions in a single class and method calls done directly in
the sparql query : sounds like a dream !)

Marc

Le mardi 26 décembre 2017 à 13:22 -0500, ajs6f a écrit :
> That exception doesn't appear to have anything to do with extension
> functions. It indicates a problem between client and server.
> 
> Please show at _least_ your actual query execution code, your
> complete Fuseki config, and a complete stacktrace.
> 
> 
> ajs6f
> 
> > On Dec 26, 2017, at 1:17 PM, Marc Agate <[email protected]>
> > wrote:
> > 
> > I forgot to mention that according to 
> > https://jena.apache.org/documentation/query/java-uri.html
> > I tried for testing purpose to set a PREFIX f:
> > <java:io.bdrc.ldsearch.query.functions.CustomARQFunctions.>and
> > added
> > the following to fuseki config : 
> > ja:loadClass "io.bdrc.ldsearch.query.functions.CustomARQFunctions"
> > .
> > where CustomARQFunctions is :
> > public class CustomARQFunctions {
> >     public static NodeValue myFilter(NodeValue value1) {
> >         int i = value1.asString().length();
> >         return NodeValue.makeInteger(i);
> >     }
> > }
> > since according to 
> > https://jena.apache.org/documentation/query/writing_functions.html
> > using the java:URI scheme "dynamically loads the code, which must
> > be on
> > the Java classpath. With this scheme, the function URI gives the
> > class
> > name. There is automatic registration of a wrapper into the
> > function
> > registry. This way, no explicit registration step is needed by the
> > application and queries issues with the command line tools can load
> > custom functions."
> > but no luck: I keep getting the following exception:
> > Exception in thread "main" HttpException: 404       at
> > org.apache.jena.sparql.engine.http.HttpQuery.execGet(HttpQuery.java
> > :328
> > )   at
> > org.apache.jena.sparql.engine.http.HttpQuery.exec(HttpQuery.java:28
> > 8)  
> > at
> > org.apache.jena.sparql.engine.http.QueryEngineHTTP.execResultSetInn
> > er(Q
> > ueryEngineHTTP.java:348)    at
> > org.apache.jena.sparql.engine.http.QueryEngineHTTP.execSelect(Query
> > Engi
> > neHTTP.java:340)
> > I am stuck !
> > Marc
> > Le mardi 26 décembre 2017 à 18:56 +0100, Marc Agate a écrit :
> > > Hi,
> > > Adam's gave me the right direction.
> > > I managed to load my function class in fuseki config using
> > > ja:loadClass
> > > but now remains the following issue (the function is not
> > > registered)seefuseki logs :
> > > [2017-12-26 16:10:13] exec       WARN  URI <http://purl.bdrc.io/f
> > > unct
> > > ions#MyFilterFunction> has no registered function factory
> > > How can I register this function now that I have the code
> > > available
> > > onthe endpoint side ?
> > > Thanks for helping
> > > Marc.
> > > 
> > > Le mardi 26 décembre 2017 à 17:43 +0000, Andy Seaborne a écrit :
> > > > As well s Adam's point (and all the libraries your function
> > > > needs, transitively)
> > > > What is in the Fuseki log file?How was the data loaded into
> > > > Fuseki?
> > > >  >> I printed out the >> FunctionRegistry
> > > >      And
> > > > On 26/12/17 14:51, ajs6f wrote:
> > > > > I'm not as familiar with the extension points of ARQ as I
> > > > > wouldlike to be, but as I understand what you are doing, you
> > > > > areregistering a new function with your _local_ registry,
> > > > > then
> > > > > firinga query at a _remote_ endpoint (which has a completely
> > > > > independentregistry in a different JVM in a different
> > > > > process,
> > > > > potentially ina different _system_).
> > > > > The query is getting interpreted and executed by that
> > > > > remoteservice, not locally. So you need to register the
> > > > > function
> > > > > _there_.
> > > > > Take a look at this thread:
> > > > > https://lists.apache.org/thread.html/1cda23332af4264883e88697
> > > > > d994
> > > > > 605770edcde2f93ddea51240e4b8@%3Cusers.jena.apache.org%3E
> > > > > It should get you started as to how to register
> > > > > extensionfunctionality in Fuseki.
> > > > > 
> > > > > Adam Soroka
> > > > > > On Dec 26, 2017, at 9:34 AM, Marc Agate <[email protected]
> > > > > > om>
> > > > > > wrote:
> > > > > > 
> > > > > > Hi !
> > > > > > 
> > > > > > I successfully implemented sparql queries using custom ARQ
> > > > > > functions
> > > > > > using the following (custom function code):
> > > > > > 
> > > > > > ****************
> > > > > > public class LevenshteinFilter extends FunctionBase2 {
> > > > > > 
> > > > > >      public LevenshteinFilter() { super() ; }
> > > > > > 
> > > > > >      public NodeValue exec(NodeValue value1, NodeValue
> > > > > > value2){
> > > > > >          LevenshteinDistance LD=new LevenshteinDistance();
> > > > > >          int i = LD.apply(value1.asString(),
> > > > > > value2.asString());
> > > > > >          return NodeValue.makeInteger(i);
> > > > > >      }
> > > > > > }
> > > > > > ***************
> > > > > > 
> > > > > > it works fine when I query against a Model loaded from a
> > > > > > turtle
> > > > > > file,
> > > > > > like this:
> > > > > > 
> > > > > > ***************
> > > > > > InputStream input =
> > > > > > QueryProcessor.class.getClassLoader().getResourceAsStream("
> > > > > > full
> > > > > > .t
> > > > > > tl");
> > > > > >              model =
> > > > > > ModelFactory.createMemModelMaker().createModel("default");
> > > > > >              model.read(input,null,"TURTLE"); // null base
> > > > > > URI,
> > > > > > since
> > > > > > model URIs are absolute
> > > > > >              input.close();
> > > > > > ***************
> > > > > > 
> > > > > > with the query being sent like this :
> > > > > > 
> > > > > > ***************
> > > > > > String functionUri = "http://www.example1.org/LevenshteinFu
> > > > > > ncti
> > > > > > on
> > > > > > ";
> > > > > >          FunctionRegistry.get().put(functionUri ,
> > > > > > LevenshteinFilter.class);
> > > > > > 
> > > > > >          String s = "whatever you want";
> > > > > >          String sparql = prefixes+" SELECT DISTINCT ?l
> > > > > > WHERE {
> > > > > > ?x
> > > > > > rdfs:label ?l . " +  "FILTER(fct:LevenshteinFunction(?l,
> > > > > > \"" +
> > > > > > s
> > > > > > + "\")
> > > > > > < 4) }";
> > > > > >          Query query = QueryFactory.create(sparql);
> > > > > >          QueryExecution qexec =
> > > > > > QueryExecutionFactory.create(query,
> > > > > > model);
> > > > > >          ResultSet rs = qexec.execSelect();
> > > > > > ***************
> > > > > > 
> > > > > > However, if i use a working fuseki endpoint for the same
> > > > > > dataset
> > > > > > (full.ttl) like this :
> > > > > > 
> > > > > > ***************
> > > > > > fusekiUrl="http://localhost:3030/ds/query";;
> > > > > > ***************
> > > > > > 
> > > > > > sending the query like this (using
> > > > > > QueryExecutionFactory.sparqlService(fusekiUrl,query)
> > > > > > instead of
> > > > > > QueryExecutionFactory.create(query,model) ):
> > > > > > 
> > > > > > ***************
> > > > > > String functionUri = "http://www.example1.org/LevenshteinFu
> > > > > > ncti
> > > > > > on
> > > > > > ";
> > > > > >          FunctionRegistry.get().put(functionUri ,
> > > > > > LevenshteinFilter.class);
> > > > > > 
> > > > > >          String s = "whatever you want";
> > > > > >          String sparql = prefixes+" SELECT DISTINCT ?l
> > > > > > WHERE {
> > > > > > ?x
> > > > > > rdfs:label ?l . " + "FILTER(fct:LevenshteinFunction(?l, \""
> > > > > > + s
> > > > > > +
> > > > > > "\")
> > > > > > < 4) }";
> > > > > >          Query query = QueryFactory.create(sparql);
> > > > > >          QueryExecution qexec =
> > > > > > QueryExecutionFactory.sparqlService(fusekiUrl,query);
> > > > > >          ResultSet rs = qexec.execSelect();
> > > > > > ***************
> > > > > > 
> > > > > > Then I don't get any results back. In both cases I printed
> > > > > > out
> > > > > > the
> > > > > > FunctionRegistry and they contain exactly the same entries,
> > > > > > especially
> > > > > > :
> > > > > > 
> > > > > > key=http://www.example1.org/LevenshteinFunction value:
> > > > > > org.apache.jena.sparql.function.FunctionFactoryAuto@5a45133
> > > > > > e
> > > > > > 
> > > > > > Any clue ?
> > > > > > 
> > > > > > Thanks
> 
> 

Reply via email to