package com.franz.ag.repository.examples;

import static com.franz.ag.repository.examples.AGRepositoryValueFactory.*;

import com.franz.ag.repository.AGRepository;
import com.franz.agbase.AllegroGraph;
import com.franz.agbase.AllegroGraphConnection;
import com.franz.agbase.AllegroGraphException;
import com.franz.agbase.examples.AGPaths;

import org.openrdf.model.ValueFactory;
import org.openrdf.query.BooleanQuery;
import org.openrdf.query.QueryLanguage;
import org.openrdf.query.impl.DatasetImpl;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;

public class AGRepositoryDataset {
    /**
     * Demonstrates some basics of querying with respect to a Dataset.
     *
     * @param args unused
     * @throws Exception
     */
    public static void main(String[] args) throws Exception {
        // Connect to the server, which must already be running.
        AllegroGraphConnection ags = new AllegroGraphConnection();
        try {
            ags.enable();
        } catch (Exception e) {
            throw new AllegroGraphException("Server connection problem", e);
        }

        // Get a connection to a fresh repository
        AllegroGraph ts = ags.renew("repositorytuplequery", AGPaths.TRIPLE_STORES);
        Repository repo = new AGRepository(ts);
        repo.initialize();
        RepositoryConnection repoConn = repo.getConnection();

        // Get the ValueFactory for the Repository and create some example values.
        ValueFactory vf = repo.getValueFactory();
        createExampleValues(vf);
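
        // Add Alice's statements to context2, Bob's to context1, and the
        // publisher statements to the store's default (null) context.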
        repoConn.add(alice, name, nameAlice, context2);
        repoConn.add(alice, mbox, mboxAlice, context2);
        repoConn.add(context2, publisher, nameAlice);
        repoConn.add(bob, name, nameBob, context1);
        repoConn.add(bob, mbox, mboxBob, context1);
        repoConn.add(context1, publisher, nameBob);
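
        // Build an ASK query that checks whether any resource has a foaf:name,
        // and bind ?name to nameBob so only Bob's name can satisfy it.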
        StringBuilder queryBuilder = new StringBuilder();
        queryBuilder.append("PREFIX foaf: <" + FOAF_NS + "> ");
        queryBuilder.append("ASK ");
        queryBuilder.append("{ ?p foaf:name ?name }");
        BooleanQuery query = repoConn.prepareBooleanQuery(QueryLanguage.SPARQL, queryBuilder.toString());
        query.setBinding("name", nameBob);
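
        // Evaluate once before any dataset is set on the query; the dataset
        // printed below is still empty and not yet associated with the query.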
        DatasetImpl dataset = new DatasetImpl();
        System.out.println("Query result is " + query.evaluate() + " using dataset: ");
        System.out.println(dataset);

        // default graph: {context1}
        dataset.addDefaultGraph(context1);
        query.setDataset(dataset);
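        // Bob's name statement is in context1, so this ASK should be true.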
System.out.println("Query result is " + query.evaluate() + " using dataset: ");
System.out.println(dataset);
// default graph: {context1, context2}
dataset.addDefaultGraph(context2);
query.setDataset(dataset);
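        // Both contexts are now in the default graph, so the ASK should still be true.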
System.out.println("Query result is " + query.evaluate() + " using dataset: ");
System.out.println(dataset);
// default graph: {context2}
dataset.removeDefaultGraph(context1);
query.setDataset(dataset);
System.out.println("Query result is " + query.evaluate() + " using dataset: ");
System.out.println(dataset);
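
        // Ask the same question again, but this time only against named graphs,
        // using a GRAPH pattern.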
        queryBuilder.setLength(0);
        queryBuilder.append("PREFIX foaf: <" + FOAF_NS + "> ");
        queryBuilder.append("ASK ");
        queryBuilder.append("{ GRAPH ?g { ?p foaf:name ?name } }");
        query = repoConn.prepareBooleanQuery(QueryLanguage.SPARQL, queryBuilder.toString());
        query.setBinding("name", nameBob);

        // default graph: {context2}; named graph: {}
        query.setDataset(dataset);
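        // The dataset has no named graphs yet, so the GRAPH pattern cannot match; expect false.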
System.out.println("Query result is " + query.evaluate() + " using dataset: ");
System.out.println(dataset);
// default graph: {context1, context2}; named graph: {context2}
dataset.addDefaultGraph(context1);
dataset.addNamedGraph(context2);
query.setDataset(dataset);
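        // The only named graph is context2, which holds Alice's name rather than Bob's; expect false.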
System.out.println("Query result is " + query.evaluate() + " using dataset: ");
System.out.println(dataset);
// default graph: {context1, context2}; named graph: {context1,
// context2}
dataset.addNamedGraph(context1);
query.setDataset(dataset);
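        // context1 is now also a named graph and contains Bob's name; expect true.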
System.out.println("Query result is " + query.evaluate() + " using dataset: ");
System.out.println(dataset);
// Close the RepositoryConnection and shutdown the Repository
// Close the store and disconnect from the server
repoConn.close();
repo.shutDown();
ts.closeTripleStore();
ags.disable();
}
}