Skip to content

Commit

Permalink
rpt integrate now runs dbms detection to activate an initial profile …
Browse files Browse the repository at this point in the history
…which can be used in DBMS-specific macros.
  • Loading branch information
Aklakan committed Sep 28, 2023
1 parent 2305472 commit 4c5a75e
Showing 1 changed file with 23 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -384,23 +384,37 @@ public static int sparqlIntegrate(CmdSparqlIntegrateMain cmd) throws Exception {
dataSourceTmp = RdfDataEngines.decorate(dataSourceTmp, new RdfDataSourceDecoratorSansa());
}

String rawBnodeProfile = cmd.bnodeProfile;

if ("auto".equalsIgnoreCase(rawBnodeProfile)) {
try (RDFConnection conn = dataSourceTmp.getConnection()) {
rawBnodeProfile = RdfDataSourceWithBnodeRewrite.detectProfile(conn);
// Attempt to detect the DBMS name.
// If one is detected, use it as an active profile name.
String dmbsProfile = null;
try (RDFConnection conn = dataSourceTmp.getConnection()) {
dmbsProfile = RdfDataSourceWithBnodeRewrite.detectProfile(conn);
if (logger.isInfoEnabled()) {
logger.info("Detected DBMS: " + dmbsProfile);
}
}

String bnodeProfile = rawBnodeProfile;
String bnodeProfile = cmd.bnodeProfile;
if ("auto".equalsIgnoreCase(bnodeProfile)) {
bnodeProfile = dmbsProfile;
}

Set<String> macroProfiles = new HashSet<>();
if (dmbsProfile != null) {
macroProfiles.add(dmbsProfile);
}

if (!Strings.isNullOrEmpty(bnodeProfile)) {
RdfDataSourceDecorator decorator = (x, conf) -> new RdfDataSourceWithBnodeRewrite(x, bnodeProfile);
dataSourceTmp = RdfDataEngines.decorate(dataSourceTmp, decorator);
dataSourceTmp = RdfDataEngines.of(
new RdfDataSourceWithBnodeRewrite(dataSourceTmp, bnodeProfile),
dataSourceTmp::close);
// RdfDataSourceDecorator decorator = (x, conf) -> new RdfDataSourceWithBnodeRewrite(x, bnodeProfile);
// dataSourceTmp = RdfDataEngines.decorate(dataSourceTmp, decorator);
}

// Load function macros (run sparql inferences first)
Map<String, UserDefinedFunctionDefinition> udfRegistry = new LinkedHashMap<>();
for (String macroSource : cmd.macroSources) {
Set<String> macroProfiles = new HashSet<>();
Model model = RDFDataMgr.loadModel(macroSource);
SparqlStmtMgr.execSparql(model, "udf-inferences.sparql");
Map<String, UserDefinedFunctionDefinition> contrib = UserDefinedFunctions.load(model, macroProfiles);
Expand All @@ -417,7 +431,6 @@ public static int sparqlIntegrate(CmdSparqlIntegrateMain cmd) throws Exception {

RdfDataEngine datasetAndDelete = dataSourceTmp;


// Dataset dataset = datasetAndDelete.getKey();
// Closeable deleteAction = datasetAndDelete.getValue();
Thread shutdownHook = new Thread(() -> {
Expand Down

0 comments on commit 4c5a75e

Please sign in to comment.