diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9f11b75
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.idea/
diff --git a/src/divide-central/divide-api/pom.xml b/src/divide-central/divide-api/pom.xml
new file mode 100644
index 0000000..dd79f9a
--- /dev/null
+++ b/src/divide-central/divide-api/pom.xml
@@ -0,0 +1,55 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>divide</artifactId>
+        <groupId>be.ugent.idlab</groupId>
+        <version>1.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>divide-api</artifactId>
+
+    <repositories>
+        <repository>
+            <id>maven-restlet</id>
+            <name>Public online Restlet repository</name>
+            <url>https://maven.restlet.org</url>
+        </repository>
+    </repositories>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>divide-engine</artifactId>
+            <version>1.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.restlet.jse</groupId>
+            <artifactId>org.restlet</artifactId>
+            <version>2.3.6</version>
+        </dependency>
+        <dependency>
+            <groupId>org.restlet.jee</groupId>
+            <artifactId>org.restlet.ext.slf4j</artifactId>
+            <version>2.2.2</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.8.5</version>
+        </dependency>
+
+    </dependencies>
+
+</project>
\ No newline at end of file
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiApplication.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiApplication.java
new file mode 100644
index 0000000..57cce05
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiApplication.java
@@ -0,0 +1,65 @@
+package be.ugent.idlab.divide.api;
+
+import be.ugent.idlab.divide.api.endpoints.component.ComponentEndpoint;
+import be.ugent.idlab.divide.api.endpoints.component.GeneralComponentEndpoint;
+import be.ugent.idlab.divide.api.endpoints.query.DivideQueryEndpoint;
+import be.ugent.idlab.divide.api.endpoints.query.DivideQueryRegistrationAsRspQlEndpoint;
+import be.ugent.idlab.divide.api.endpoints.query.DivideQueryRegistrationAsSparqlEndpoint;
+import be.ugent.idlab.divide.api.endpoints.query.GeneralDivideQueryEndpoint;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import org.restlet.Application;
+import org.restlet.Restlet;
+import org.restlet.routing.Router;
+import org.restlet.routing.Template;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DivideApiApplication extends Application {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DivideApiApplication.class.getName());
+
+ public static final String ATTR_DIVIDE_ENGINE = "divide_engine";
+
+ private final IDivideEngine divideEngine;
+
+ public DivideApiApplication(IDivideEngine divideEngine) {
+ this.divideEngine = divideEngine;
+ }
+
+ @Override
+ public Restlet createInboundRoot() {
+ getContext().getAttributes().put(ATTR_DIVIDE_ENGINE, divideEngine);
+
+ Router router = new Router(getContext());
+ router.setDefaultMatchingMode(Template.MODE_EQUALS);
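+ // MODE_EQUALS makes a route match only when its template covers the
+ // complete remaining path, so e.g. /component/x/y is not accidentally
+ // captured by the /component/{id} template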
+
+ router.attach(DivideRoutes.ENDPOINT_COMPONENT, ComponentEndpoint.class);
+ LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_COMPONENT);
+ ComponentEndpoint.logEndpoints(LOGGER);
+
+ router.attach(DivideRoutes.ENDPOINT_COMPONENT_GENERAL, GeneralComponentEndpoint.class);
+ LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_COMPONENT_GENERAL);
+ GeneralComponentEndpoint.logEndpoints(LOGGER);
+
+ router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY, DivideQueryEndpoint.class);
+ LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY);
+ DivideQueryEndpoint.logEndpoints(LOGGER);
+
+ router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_SPARQL,
+ DivideQueryRegistrationAsSparqlEndpoint.class);
+ LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_SPARQL);
+ DivideQueryRegistrationAsSparqlEndpoint.logEndpoints(LOGGER);
+
+ router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_RSP_QL,
+ DivideQueryRegistrationAsRspQlEndpoint.class);
+ LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_RSP_QL);
+ DivideQueryRegistrationAsRspQlEndpoint.logEndpoints(LOGGER);
+
+ router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY_GENERAL, GeneralDivideQueryEndpoint.class);
+ LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY_GENERAL);
+ GeneralDivideQueryEndpoint.logEndpoints(LOGGER);
+
+ return router;
+ }
+
+}
\ No newline at end of file
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiComponentFactory.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiComponentFactory.java
new file mode 100644
index 0000000..30c6a07
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiComponentFactory.java
@@ -0,0 +1,58 @@
+package be.ugent.idlab.divide.api;
+
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import org.restlet.Component;
+import org.restlet.data.Protocol;
+
+@SuppressWarnings("unused")
+public class DivideApiComponentFactory {
+
+ /**
+ * Create a Restlet {@link Component} that can be started to host an API
+ * for the given DIVIDE engine. This DIVIDE API will be hosted via the
+ * HTTP protocol on the given host and port, on the root path,
+ * i.e., at http://[host]:[port]/.
+ *
+ * @param divideEngine DIVIDE engine that should be wrapped by the created
+ * API component
+ * @param host host at which the DIVIDE API should run
+ * @param port port at which the DIVIDE API should run
+ * @return a Restlet {@link Component} which can be started with the
+ * {@link Component#start()} method to host the DIVIDE API
+ */
+ public static Component createRestApiComponent(IDivideEngine divideEngine,
+ String host,
+ int port) {
+ return createRestApiComponent(divideEngine, host, port, "");
+ }
+
+ /**
+ * Create a Restlet {@link Component} that can be started to host an API
+ * for the given DIVIDE engine. This DIVIDE API will be hosted via the
+ * HTTP protocol on the given host and port, on the specified uri path,
+ * i.e., at http://[host]:[port]/[uri].
+ *
+ * @param divideEngine DIVIDE engine that should be wrapped by the created
+ * API component
+ * @param host host at which the DIVIDE API should run
+ * @param port port at which the DIVIDE API should run
+ * @param uri path URI string at which the DIVIDE API should run
+ * @return a Restlet {@link Component} which can be started with the
+ * {@link Component#start()} method to host the DIVIDE API
+ */
+ public static Component createRestApiComponent(IDivideEngine divideEngine,
+ String host,
+ int port,
+ String uri) {
+ // create Restlet component
+ Component component = new Component();
+ component.getServers().add(Protocol.HTTP, host, port);
+
+ // create and attach Restlet application
+ DivideApiApplication divideApiApplication = new DivideApiApplication(divideEngine);
+ component.getDefaultHost().attach(uri, divideApiApplication);
+
+ return component;
+ }
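+
+ // Usage sketch (hypothetical host and port values): the returned
+ // component still needs to be started explicitly, e.g.:
+ //   Component api = DivideApiComponentFactory.createRestApiComponent(
+ //           divideEngine, "localhost", 5000);
+ //   api.start();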
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideRoutes.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideRoutes.java
new file mode 100644
index 0000000..93b49ec
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideRoutes.java
@@ -0,0 +1,24 @@
+package be.ugent.idlab.divide.api;
+
+import static be.ugent.idlab.divide.api.endpoints.CustomEndpoint.SERVER_ATTR_ID;
+
+class DivideRoutes {
+
+ private static final String COMPONENT_ENTITY = "component";
+ private static final String QUERY_ENTITY = "query";
+
+ static final String ENDPOINT_COMPONENT_GENERAL =
+ "/" + COMPONENT_ENTITY;
+ static final String ENDPOINT_COMPONENT =
+ "/" + COMPONENT_ENTITY + "/{" + SERVER_ATTR_ID + "}";
+
+ static final String ENDPOINT_DIVIDE_QUERY_GENERAL =
+ "/" + QUERY_ENTITY;
+ static final String ENDPOINT_DIVIDE_QUERY =
+ "/" + QUERY_ENTITY + "/{" + SERVER_ATTR_ID + "}";
+ static final String ENDPOINT_DIVIDE_QUERY_REGISTER_AS_SPARQL =
+ "/" + QUERY_ENTITY + "/sparql/{" + SERVER_ATTR_ID + "}";
+ static final String ENDPOINT_DIVIDE_QUERY_REGISTER_AS_RSP_QL =
+ "/" + QUERY_ENTITY + "/rspql/{" + SERVER_ATTR_ID + "}";
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/CustomEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/CustomEndpoint.java
new file mode 100644
index 0000000..b9cc7a7
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/CustomEndpoint.java
@@ -0,0 +1,20 @@
+package be.ugent.idlab.divide.api.endpoints;
+
+import be.ugent.idlab.divide.api.DivideApiApplication;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import org.restlet.resource.ServerResource;
+
+public abstract class CustomEndpoint extends ServerResource {
+
+ public static final String SERVER_ATTR_ID = "id";
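+
+ // name of the {id} URL template variable used in the DivideRoutes
+ // templates; Restlet exposes its value as a request attribute under this key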
+
+ protected IDivideEngine getDivideEngine() {
+ return (IDivideEngine) getContext().getAttributes().get(
+ DivideApiApplication.ATTR_DIVIDE_ENGINE);
+ }
+
+ protected String getIdAttribute() {
+ return (String) getRequest().getAttributes().get(SERVER_ATTR_ID);
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/ComponentEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/ComponentEndpoint.java
new file mode 100644
index 0000000..bb24b4c
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/ComponentEndpoint.java
@@ -0,0 +1,139 @@
+package be.ugent.idlab.divide.api.endpoints.component;
+
+import be.ugent.idlab.divide.api.endpoints.CustomEndpoint;
+import be.ugent.idlab.divide.api.representation.component.ComponentRepresentation;
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Status;
+import org.restlet.representation.Representation;
+import org.restlet.resource.Delete;
+import org.restlet.resource.Get;
+import org.restlet.resource.Options;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class ComponentEndpoint extends CustomEndpoint {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+ private static final Logger LOGGER = LoggerFactory.getLogger(ComponentEndpoint.class.getName());
+
+ @Options
+ public void optionsRequestHandler() {
+ Set<Method> allowedMethods = new HashSet<>();
+ allowedMethods.add(Method.GET);
+ allowedMethods.add(Method.DELETE);
+ getResponse().setAccessControlAllowMethods(allowedMethods);
+ getResponse().setAccessControlAllowOrigin("*");
+ }
+
+ public static void logEndpoints(Logger logger) {
+ logger.info(" GET: retrieve DIVIDE component with ID {}", SERVER_ATTR_ID);
+ logger.info(" DELETE: unregister DIVIDE component with ID {}", SERVER_ATTR_ID);
+ }
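+
+ // Example interaction, assuming the API is hosted at a hypothetical
+ // http://localhost:5000 (see DivideApiComponentFactory):
+ //   GET    http://localhost:5000/component/<component-id>
+ //   DELETE http://localhost:5000/component/<component-id>?unregister=true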
+
+ @Get
+ public void getComponent() {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String componentId = getIdAttribute();
+
+ IComponent component = divideEngine.getRegisteredComponentById(componentId);
+
+ if (component != null) {
+ ComponentRepresentation componentRepresentation =
+ new ComponentRepresentation(component);
+
+ String message = "Component with ID " + componentId + " successfully retrieved";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(GSON.toJson(componentRepresentation),
+ MediaType.APPLICATION_JSON);
+
+ } else {
+ String message = "Component with ID '" + componentId + "' does not exist";
+ getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while getting component data";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+ @Delete
+ public void unregisterComponent(Representation rep) {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String componentId = getIdAttribute();
+
+ IComponent component = divideEngine.getRegisteredComponentById(componentId);
+
+ if (component != null) {
+ // retrieve url parameter which specifies whether the queries of this
+ // component should be unregistered
+ // (default when it is not specified = false)
+ boolean unregisterQueries =
+ Boolean.parseBoolean(getQueryValue("unregister"));
+
+ divideEngine.unregisterComponent(componentId, unregisterQueries);
+
+ String message = "Component with ID " + componentId + " successfully unregistered";
+ getResponse().setStatus(Status.SUCCESS_NO_CONTENT, message);
+
+ } else {
+ String message = "Component with ID '" + componentId + "' does not exist";
+ getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while unregistering component";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/GeneralComponentEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/GeneralComponentEndpoint.java
new file mode 100644
index 0000000..5879e6e
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/GeneralComponentEndpoint.java
@@ -0,0 +1,160 @@
+package be.ugent.idlab.divide.api.endpoints.component;
+
+import be.ugent.idlab.divide.api.endpoints.CustomEndpoint;
+import be.ugent.idlab.divide.api.representation.component.ComponentRepresentation;
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.util.component.ComponentEntry;
+import be.ugent.idlab.divide.util.component.ComponentEntryParserException;
+import be.ugent.idlab.divide.util.component.JsonComponentEntryParser;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Status;
+import org.restlet.representation.Representation;
+import org.restlet.resource.Get;
+import org.restlet.resource.Options;
+import org.restlet.resource.Post;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+public class GeneralComponentEndpoint extends CustomEndpoint {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+ private static final Logger LOGGER = LoggerFactory.getLogger(GeneralComponentEndpoint.class.getName());
+
+ @Options
+ public void optionsRequestHandler() {
+ Set<Method> allowedMethods = new HashSet<>();
+ allowedMethods.add(Method.GET);
+ allowedMethods.add(Method.POST);
+ getResponse().setAccessControlAllowMethods(allowedMethods);
+ getResponse().setAccessControlAllowOrigin("*");
+ }
+
+ public static void logEndpoints(Logger logger) {
+ logger.info(" GET: retrieve all registered DIVIDE components");
+ logger.info(" POST: register a new DIVIDE component (description in HTTP body)");
+ }
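+
+ // Example registration body (hypothetical JSON keys and values, mirroring
+ // the ComponentEntry fields; the exact key names are determined by
+ // JsonComponentEntryParser, which is not part of this diff):
+ //   POST /component
+ //   {
+ //     "contextIris": ["http://example.org/patient1"],
+ //     "rspQueryLanguage": "csparql",
+ //     "rspEngineUrl": "http://localhost:9000/rsp"
+ //   }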
+
+ @Get
+ public void getComponents() {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ Collection<ComponentRepresentation> components = new ArrayList<>();
+
+ for (IComponent component : divideEngine.getRegisteredComponents()) {
+ components.add(new ComponentRepresentation(component));
+ }
+
+ String message = "Components successfully retrieved";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(GSON.toJson(components), MediaType.APPLICATION_JSON);
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while getting component data";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+ @Post
+ public void registerComponent(Representation rep) {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ if (rep != null) {
+ String componentEntryString = rep.getText();
+
+ // parse component entry
+ ComponentEntry componentEntry =
+ JsonComponentEntryParser.parseComponentEntry(componentEntryString);
+
+ // register component
+ IComponent component = divideEngine.registerComponent(
+ new ArrayList<>(componentEntry.getContextIris()),
+ componentEntry.getRspQueryLanguage(),
+ componentEntry.getRspEngineUrl());
+
+ if (component != null) {
+ String message = "Component with ID " + component.getId() +
+ " successfully registered";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(
+ GSON.toJson(new ComponentRepresentation(component)),
+ MediaType.APPLICATION_JSON);
+
+ } else {
+ String message = "Component with the specified host, port and path of the " +
+ "RSP engine URL already exists";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "No component entry information specified";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (ComponentEntryParserException e) {
+ String message = String.format("Component entry information invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideInvalidInputException e) {
+ String message = String.format("Component input invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while registering component";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryEndpoint.java
new file mode 100644
index 0000000..e3d3112
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryEndpoint.java
@@ -0,0 +1,264 @@
+package be.ugent.idlab.divide.api.endpoints.query;
+
+import be.ugent.idlab.divide.api.endpoints.CustomEndpoint;
+import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.util.query.ContextEnrichmentEntry;
+import be.ugent.idlab.divide.util.query.DivideQueryEntryInDivideFormat;
+import be.ugent.idlab.divide.util.query.DivideQueryEntryParser;
+import be.ugent.idlab.util.io.IOUtilities;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Status;
+import org.restlet.representation.Representation;
+import org.restlet.resource.Delete;
+import org.restlet.resource.Get;
+import org.restlet.resource.Options;
+import org.restlet.resource.Post;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class DivideQueryEndpoint extends CustomEndpoint {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+ private static final Logger LOGGER = LoggerFactory.getLogger(DivideQueryEndpoint.class.getName());
+
+ @Options
+ public void optionsRequestHandler() {
+ Set<Method> allowedMethods = new HashSet<>();
+ allowedMethods.add(Method.GET);
+ allowedMethods.add(Method.DELETE);
+ allowedMethods.add(Method.POST);
+ getResponse().setAccessControlAllowMethods(allowedMethods);
+ getResponse().setAccessControlAllowOrigin("*");
+ }
+
+ public static void logEndpoints(Logger logger) {
+ logger.info(" GET: retrieve DIVIDE query with ID {}", SERVER_ATTR_ID);
+ logger.info(" POST: register DIVIDE query with ID {}" +
+ " (JSON description of DIVIDE query inputs in HTTP body)", SERVER_ATTR_ID);
+ logger.info(" DELETE: unregister DIVIDE query with ID {}", SERVER_ATTR_ID);
+ }
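+
+ // The POST body is parsed by DivideQueryEntryParser into the three DIVIDE
+ // query inputs (query pattern, sensor query rule, goal) plus an optional
+ // context enrichment; the JSON key names themselves are defined by that
+ // parser and are not part of this diff.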
+
+ @Get
+ public void getDivideQuery() {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String divideQueryName = getIdAttribute();
+
+ IDivideQuery divideQuery = divideEngine.getDivideQueryByName(divideQueryName);
+
+ if (divideQuery != null) {
+ DivideQueryRepresentation divideQueryRepresentation =
+ new DivideQueryRepresentation(divideQuery);
+
+ String message = "DIVIDE query with name '" + divideQueryName + "' successfully retrieved";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(GSON.toJson(divideQueryRepresentation),
+ MediaType.APPLICATION_JSON);
+
+ } else {
+ String message = "DIVIDE query with name '" + divideQueryName + "' does not exist";
+ getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while getting DIVIDE query data";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+ @Post
+ public void addDivideQuery(Representation rep) {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String divideQueryName = getIdAttribute();
+
+ if (divideEngine.getDivideQueryByName(divideQueryName) == null) {
+
+ if (rep != null) {
+ String divideQueryJson = rep.getText();
+
+ DivideQueryEntryInDivideFormat divideQueryEntry;
+ String queryPattern;
+ String sensorQueryRule;
+ String goal;
+ try {
+ // parse DIVIDE query JSON
+ divideQueryEntry = DivideQueryEntryParser.parseDivideQueryEntryInDivideFormat(divideQueryJson);
+ queryPattern = divideQueryEntry.getQueryPattern();
+ sensorQueryRule = divideQueryEntry.getSensorQueryRule();
+ goal = divideQueryEntry.getGoal();
+
+ } catch (Exception e) {
+ String message = "Specified DIVIDE query information is not valid JSON";
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ return;
+ }
+
+ // ensure all required information is provided
+ if (queryPattern != null && !queryPattern.isEmpty() &&
+ sensorQueryRule != null && !sensorQueryRule.isEmpty() &&
+ goal != null && !goal.isEmpty()) {
+
+ // create context enrichment
+ ContextEnrichmentEntry contextEnrichmentEntry =
+ divideQueryEntry.getContextEnrichment();
+ ContextEnrichment contextEnrichment;
+ if (contextEnrichmentEntry == null
+ || contextEnrichmentEntry.getQueries() == null
+ || contextEnrichmentEntry.getQueries().isEmpty()) {
+ contextEnrichment = new ContextEnrichment();
+ } else {
+ contextEnrichment = new ContextEnrichment(
+ contextEnrichmentEntry.doReasoning(),
+ contextEnrichmentEntry.executeOnOntologyTriples(),
+ contextEnrichmentEntry.getQueries());
+ }
+
+ // add query to DIVIDE engine
+ // (response cannot be null since it was checked before whether
+ // query with this name already exists)
+ IDivideQuery divideQuery = divideEngine.addDivideQuery(
+ divideQueryName,
+ IOUtilities.removeWhiteSpace(queryPattern).replaceAll("\r", " "),
+ IOUtilities.removeWhiteSpace(sensorQueryRule).replaceAll("\r", " "),
+ IOUtilities.removeWhiteSpace(goal).replaceAll("\r", " "),
+ contextEnrichment);
+
+ String message = "DIVIDE query with name '" + divideQueryName +
+ "' successfully registered";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(
+ GSON.toJson(new DivideQueryRepresentation(divideQuery)),
+ MediaType.APPLICATION_JSON);
+
+ } else {
+ String message = "Not all required DIVIDE query JSON information " +
+ "is specified and non-empty";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "No DIVIDE query JSON information specified";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "DIVIDE query with the specified name already exists";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (DivideInvalidInputException e) {
+ String message = String.format("Query input invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while adding DIVIDE query";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+ @Delete
+ public void removeDivideQuery(Representation rep) {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String divideQueryName = getIdAttribute();
+
+ IDivideQuery divideQuery = divideEngine.getDivideQueryByName(divideQueryName);
+
+ if (divideQuery != null) {
+ // retrieve url parameter which specifies whether the queries of this
+ // DIVIDE query should be unregistered
+ // (default when it is not specified = true)
+ boolean unregisterQueries = !"false".equals(getQueryValue("unregister"));
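+ // (note: unlike Boolean.parseBoolean used for components, this
+ // expression also yields true when the parameter is absent)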
+
+ divideEngine.removeDivideQuery(divideQueryName, unregisterQueries);
+
+ String message = "DIVIDE query with name " + divideQueryName + " successfully unregistered";
+ getResponse().setStatus(Status.SUCCESS_NO_CONTENT, message);
+
+ } else {
+ String message = "DIVIDE query with name '" + divideQueryName + "' does not exist";
+ getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while removing DIVIDE query";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsRspQlEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsRspQlEndpoint.java
new file mode 100644
index 0000000..7c8dbb9
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsRspQlEndpoint.java
@@ -0,0 +1,166 @@
+package be.ugent.idlab.divide.api.endpoints.query;
+
+import be.ugent.idlab.divide.api.endpoints.CustomEndpoint;
+import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput;
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserOutput;
+import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException;
+import be.ugent.idlab.divide.util.query.ContextEnrichmentEntry;
+import be.ugent.idlab.divide.util.query.DivideQueryEntryInQueryFormat;
+import be.ugent.idlab.divide.util.query.DivideQueryEntryParser;
+import be.ugent.idlab.util.io.IOUtilities;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Status;
+import org.restlet.representation.Representation;
+import org.restlet.resource.Options;
+import org.restlet.resource.Post;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class DivideQueryRegistrationAsRspQlEndpoint extends CustomEndpoint {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+ private static final Logger LOGGER =
+ LoggerFactory.getLogger(DivideQueryRegistrationAsRspQlEndpoint.class.getName());
+
+ @Options
+ public void optionsRequestHandler() {
+ Set<Method> allowedMethods = new HashSet<>();
+ allowedMethods.add(Method.POST);
+ getResponse().setAccessControlAllowMethods(allowedMethods);
+ getResponse().setAccessControlAllowOrigin("*");
+ }
+
+ public static void logEndpoints(Logger logger) {
+ logger.info(" POST: register DIVIDE query with ID {}" +
+ " (JSON description of RSP-QL input in HTTP body)", SERVER_ATTR_ID);
+ }
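+
+ // Unlike the /query/{id} endpoint, which takes the DIVIDE query inputs
+ // directly, this endpoint first runs the engine's DIVIDE query parser to
+ // derive the query pattern, sensor query rule and goal from a single
+ // RSP-QL query description.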
+
+ @Post
+ public void addDivideQuery(Representation rep) {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String divideQueryName = getIdAttribute();
+
+ if (divideEngine.getDivideQueryByName(divideQueryName) == null) {
+
+ String divideQueryJson;
+ if (rep != null && (divideQueryJson = rep.getText()) != null
+ && !divideQueryJson.trim().isEmpty()) {
+
+ // parse JSON entry to real DIVIDE query parser & context enrichment input
+ DivideQueryEntryInQueryFormat divideQueryEntryInQueryFormat =
+ DivideQueryEntryParser.parseRspQlEntryAsDivideQuery(divideQueryJson);
+ DivideQueryParserInput divideQueryParserInput =
+ divideQueryEntryInQueryFormat.getDivideQueryParserInput();
+ ContextEnrichmentEntry contextEnrichmentEntry =
+ divideQueryEntryInQueryFormat.getContextEnrichmentEntry();
+
+ // parse RSP-QL input to actual DIVIDE query inputs
+ DivideQueryParserOutput divideQueryParserOutput =
+ divideEngine.getQueryParser().
+ parseDivideQuery(divideQueryParserInput);
+
+ // ensure all required information is provided
+ if (divideQueryParserOutput.isNonEmpty()) {
+
+ // create context enrichment
+ ContextEnrichment contextEnrichment;
+ if (contextEnrichmentEntry == null
+ || contextEnrichmentEntry.getQueries() == null
+ || contextEnrichmentEntry.getQueries().isEmpty()) {
+ contextEnrichment = new ContextEnrichment();
+ } else {
+ contextEnrichment = new ContextEnrichment(
+ contextEnrichmentEntry.doReasoning(),
+ contextEnrichmentEntry.executeOnOntologyTriples(),
+ contextEnrichmentEntry.getQueries());
+ }
+
+ // add query to DIVIDE engine
+ // (response cannot be null since it was checked before whether
+ // query with this name already exists)
+ IDivideQuery divideQuery = divideEngine.addDivideQuery(
+ divideQueryName,
+ IOUtilities.removeWhiteSpace(
+ divideQueryParserOutput.getQueryPattern()).replaceAll("\r", " "),
+ IOUtilities.removeWhiteSpace(
+ divideQueryParserOutput.getSensorQueryRule()).replaceAll("\r", " "),
+ IOUtilities.removeWhiteSpace(
+ divideQueryParserOutput.getGoal()).replaceAll("\r", " "),
+ contextEnrichment);
+
+ String message = "DIVIDE query with name '" + divideQueryName +
+ "' successfully registered";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(
+ GSON.toJson(new DivideQueryRepresentation(divideQuery)),
+ MediaType.APPLICATION_JSON);
+
+ } else {
+ String message = "Input leads to empty DIVIDE query fields";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "No DIVIDE query JSON information specified";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "DIVIDE query with the specified name already exists";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (InvalidDivideQueryParserInputException e) {
+ String message = String.format("JSON representing RSP-QL query " +
+ "input is invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideInvalidInputException e) {
+ String message = String.format("Query input invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while adding DIVIDE query";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsSparqlEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsSparqlEndpoint.java
new file mode 100644
index 0000000..4cd0fa0
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsSparqlEndpoint.java
@@ -0,0 +1,166 @@
+package be.ugent.idlab.divide.api.endpoints.query;
+
+import be.ugent.idlab.divide.api.endpoints.CustomEndpoint;
+import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput;
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserOutput;
+import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException;
+import be.ugent.idlab.divide.util.query.ContextEnrichmentEntry;
+import be.ugent.idlab.divide.util.query.DivideQueryEntryInQueryFormat;
+import be.ugent.idlab.divide.util.query.DivideQueryEntryParser;
+import be.ugent.idlab.util.io.IOUtilities;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Status;
+import org.restlet.representation.Representation;
+import org.restlet.resource.Options;
+import org.restlet.resource.Post;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class DivideQueryRegistrationAsSparqlEndpoint extends CustomEndpoint {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+ private static final Logger LOGGER =
+ LoggerFactory.getLogger(DivideQueryRegistrationAsSparqlEndpoint.class.getName());
+
+ @Options
+ public void optionsRequestHandler() {
+ Set<Method> allowedMethods = new HashSet<>();
+ allowedMethods.add(Method.POST);
+ getResponse().setAccessControlAllowMethods(allowedMethods);
+ getResponse().setAccessControlAllowOrigin("*");
+ }
+
+ public static void logEndpoints(Logger logger) {
+ logger.info(" POST: register DIVIDE query with ID {}" +
+ " (JSON description of SPARQL inputs in HTTP body)", SERVER_ATTR_ID);
+ }
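+
+ // Analogous to the RSP-QL registration endpoint: the DIVIDE query parser
+ // derives the DIVIDE query inputs, here from a SPARQL-based description.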
+
+ @Post
+ public void addDivideQuery(Representation rep) {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ String divideQueryName = getIdAttribute();
+
+ if (divideEngine.getDivideQueryByName(divideQueryName) == null) {
+
+ String divideQueryJson;
+ if (rep != null && (divideQueryJson = rep.getText()) != null
+ && !divideQueryJson.trim().isEmpty()) {
+
+ // parse JSON entry to real DIVIDE query parser & context enrichment input
+ DivideQueryEntryInQueryFormat divideQueryEntryInQueryFormat =
+ DivideQueryEntryParser.parseSparqlEntryAsDivideQuery(divideQueryJson);
+ DivideQueryParserInput divideQueryParserInput =
+ divideQueryEntryInQueryFormat.getDivideQueryParserInput();
+ ContextEnrichmentEntry contextEnrichmentEntry =
+ divideQueryEntryInQueryFormat.getContextEnrichmentEntry();
+
+ // parse SPARQL input to actual DIVIDE query inputs
+ DivideQueryParserOutput divideQueryParserOutput =
+ divideEngine.getQueryParser().
+ parseDivideQuery(divideQueryParserInput);
+
+ // ensure all required information is provided
+ if (divideQueryParserOutput.isNonEmpty()) {
+
+ // create context enrichment
+ ContextEnrichment contextEnrichment;
+ if (contextEnrichmentEntry == null
+ || contextEnrichmentEntry.getQueries() == null
+ || contextEnrichmentEntry.getQueries().isEmpty()) {
+ contextEnrichment = new ContextEnrichment();
+ } else {
+ contextEnrichment = new ContextEnrichment(
+ contextEnrichmentEntry.doReasoning(),
+ contextEnrichmentEntry.executeOnOntologyTriples(),
+ contextEnrichmentEntry.getQueries());
+ }
+
+ // add query to DIVIDE engine
+ // (response cannot be null since it was checked before whether
+ // query with this name already exists)
+ IDivideQuery divideQuery = divideEngine.addDivideQuery(
+ divideQueryName,
+ IOUtilities.removeWhiteSpace(
+ divideQueryParserOutput.getQueryPattern()).replaceAll("\r", " "),
+ IOUtilities.removeWhiteSpace(
+ divideQueryParserOutput.getSensorQueryRule()).replaceAll("\r", " "),
+ IOUtilities.removeWhiteSpace(
+ divideQueryParserOutput.getGoal()).replaceAll("\r", " "),
+ contextEnrichment);
+
+ String message = "DIVIDE query with name '" + divideQueryName +
+ "' successfully registered";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(
+ GSON.toJson(new DivideQueryRepresentation(divideQuery)),
+ MediaType.APPLICATION_JSON);
+
+ } else {
+ String message = "Input leads to empty DIVIDE query fields";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "No DIVIDE query JSON information specified";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } else {
+ String message = "DIVIDE query with the specified name already exists";
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+ }
+
+ } catch (InvalidDivideQueryParserInputException e) {
+ String message = String.format("JSON representing SPARQL query " +
+ "input is invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideInvalidInputException e) {
+ String message = String.format("Query input invalid: %s", e.getMessage());
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while adding DIVIDE query";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/GeneralDivideQueryEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/GeneralDivideQueryEndpoint.java
new file mode 100644
index 0000000..e453691
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/GeneralDivideQueryEndpoint.java
@@ -0,0 +1,78 @@
+package be.ugent.idlab.divide.api.endpoints.query;
+
+import be.ugent.idlab.divide.api.endpoints.CustomEndpoint;
+import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation;
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.restlet.data.MediaType;
+import org.restlet.data.Method;
+import org.restlet.data.Status;
+import org.restlet.resource.Get;
+import org.restlet.resource.Options;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+public class GeneralDivideQueryEndpoint extends CustomEndpoint {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+ private static final Logger LOGGER = LoggerFactory.getLogger(GeneralDivideQueryEndpoint.class.getName());
+
+ @Options
+ public void optionsRequestHandler() {
+ Set<Method> allowedMethods = new HashSet<>();
+ allowedMethods.add(Method.GET);
+ getResponse().setAccessControlAllowMethods(allowedMethods);
+ getResponse().setAccessControlAllowOrigin("*");
+ }
+
+ public static void logEndpoints(Logger logger) {
+ logger.info(" GET: retrieve all registered DIVIDE queries");
+ }
+
+ @Get
+ public void getQueries() {
+ getResponse().setAccessControlAllowOrigin("*");
+
+ IDivideEngine divideEngine = getDivideEngine();
+
+ try {
+ Collection<DivideQueryRepresentation> queries = new ArrayList<>();
+
+ for (IDivideQuery divideQuery : divideEngine.getDivideQueries()) {
+ queries.add(new DivideQueryRepresentation(divideQuery));
+ }
+
+ String message = "DIVIDE queries successfully retrieved";
+ getResponse().setStatus(Status.SUCCESS_OK, message);
+ getResponse().setEntity(GSON.toJson(queries), MediaType.APPLICATION_JSON);
+
+ } catch (DivideNotInitializedException e) {
+ String message = e.getMessage();
+ LOGGER.error(message, e);
+ getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } catch (Exception e) {
+ String logMessage = "Error while retrieving DIVIDE queries";
+ String eMessage = e.getMessage();
+ String message = logMessage + (eMessage != null ? ": " + eMessage : "");
+ LOGGER.error(logMessage, e);
+ getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message);
+ getResponse().setEntity(message, MediaType.TEXT_PLAIN);
+
+ } finally {
+ getResponse().commit();
+ commit();
+ release();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/ComponentRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/ComponentRepresentation.java
new file mode 100644
index 0000000..c90309f
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/ComponentRepresentation.java
@@ -0,0 +1,21 @@
+package be.ugent.idlab.divide.api.representation.component;
+
+import be.ugent.idlab.divide.core.component.IComponent;
+
+import java.util.List;
+
+@SuppressWarnings({"FieldCanBeLocal", "unused"})
+public class ComponentRepresentation {
+
+ private final String id;
+ private final List<String> contextIris;
+ private final RspEngineRepresentation rspEngine;
+
+ public ComponentRepresentation(IComponent component) {
+ this.id = component.getId();
+ this.contextIris = component.getContextIris();
+ this.rspEngine = new RspEngineRepresentation(
+ component.getRspEngineHandler().getRspEngine());
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspEngineRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspEngineRepresentation.java
new file mode 100644
index 0000000..f1e896a
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspEngineRepresentation.java
@@ -0,0 +1,27 @@
+package be.ugent.idlab.divide.api.representation.component;
+
+
+import be.ugent.idlab.divide.rsp.engine.IRspEngine;
+import be.ugent.idlab.divide.rsp.query.IRspQuery;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings({"FieldCanBeLocal", "unused", "WeakerAccess", "MismatchedQueryAndUpdateOfCollection"})
+public class RspEngineRepresentation {
+
+ private final String queryLanguage;
+ private final String url;
+ private final List<RspQueryRepresentation> registeredQueries;
+
+ public RspEngineRepresentation(IRspEngine engine) {
+ this.queryLanguage = engine.getRspQueryLanguage().toString().toLowerCase();
+ this.url = engine.getBaseUrl();
+
+ this.registeredQueries = new ArrayList<>();
+ for (IRspQuery rspQuery : engine.getRegisteredQueries()) {
+ registeredQueries.add(new RspQueryRepresentation(rspQuery));
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspQueryRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspQueryRepresentation.java
new file mode 100644
index 0000000..1016d65
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspQueryRepresentation.java
@@ -0,0 +1,16 @@
+package be.ugent.idlab.divide.api.representation.component;
+
+import be.ugent.idlab.divide.rsp.query.IRspQuery;
+
+@SuppressWarnings({"FieldCanBeLocal", "unused", "WeakerAccess"})
+public class RspQueryRepresentation {
+
+ private final String queryName;
+ private final String queryBody;
+
+ public RspQueryRepresentation(IRspQuery query) {
+ this.queryName = query.getQueryName();
+ this.queryBody = query.getQueryBody();
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/ContextEnrichmentRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/ContextEnrichmentRepresentation.java
new file mode 100644
index 0000000..c626e28
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/ContextEnrichmentRepresentation.java
@@ -0,0 +1,44 @@
+package be.ugent.idlab.divide.api.representation.query;
+
+import be.ugent.idlab.divide.core.context.ContextEnrichingQuery;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+@SuppressWarnings({"FieldCanBeLocal", "unused", "WeakerAccess"})
+public class ContextEnrichmentRepresentation {
+
+ private final boolean doReasoning;
+ private final boolean executeOnOntologyTriples;
+ private final List<String> queries;
+
+ public ContextEnrichmentRepresentation(ContextEnrichment contextEnrichment) {
+ switch (contextEnrichment.getMode()) {
+ case EXECUTE_ON_CONTEXT_WITHOUT_REASONING:
+ this.doReasoning = false;
+ this.executeOnOntologyTriples = false;
+
+ break;
+ case EXECUTE_ON_CONTEXT_WITH_REASONING:
+ this.doReasoning = true;
+ this.executeOnOntologyTriples = false;
+
+ break;
+ case EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING:
+ this.doReasoning = false;
+ this.executeOnOntologyTriples = true;
+
+ break;
+ case EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING:
+ default:
+ this.doReasoning = true;
+ this.executeOnOntologyTriples = true;
+ }
+
+ this.queries = contextEnrichment.getQueries().stream()
+ .map(ContextEnrichingQuery::getQuery)
+ .collect(Collectors.toList());
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/DivideQueryRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/DivideQueryRepresentation.java
new file mode 100644
index 0000000..aedaec8
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/DivideQueryRepresentation.java
@@ -0,0 +1,23 @@
+package be.ugent.idlab.divide.api.representation.query;
+
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+
+@SuppressWarnings({"FieldCanBeLocal", "unused"})
+public class DivideQueryRepresentation {
+
+ private final String name;
+ private final String queryPattern;
+ private final String sensorQueryRule;
+ private final String goal;
+ private final ContextEnrichmentRepresentation contextEnrichment;
+
+ public DivideQueryRepresentation(IDivideQuery divideQuery) {
+ this.name = divideQuery.getName();
+ this.queryPattern = divideQuery.getQueryPattern();
+ this.sensorQueryRule = divideQuery.getSensorQueryRule();
+ this.goal = divideQuery.getGoal();
+ this.contextEnrichment = new ContextEnrichmentRepresentation(
+ divideQuery.getContextEnrichment());
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntry.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntry.java
new file mode 100644
index 0000000..ae43740
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntry.java
@@ -0,0 +1,33 @@
+package be.ugent.idlab.divide.util.component;
+
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+
+import java.util.List;
+
+public class ComponentEntry {
+
+ private final List<String> contextIris;
+ private final RspQueryLanguage rspQueryLanguage;
+ private final String rspEngineUrl;
+
+ public ComponentEntry(List<String> contextIris,
+ RspQueryLanguage rspQueryLanguage,
+ String rspEngineUrl) {
+ this.contextIris = contextIris;
+ this.rspQueryLanguage = rspQueryLanguage;
+ this.rspEngineUrl = rspEngineUrl;
+ }
+
+ public List<String> getContextIris() {
+ return contextIris;
+ }
+
+ public RspQueryLanguage getRspQueryLanguage() {
+ return rspQueryLanguage;
+ }
+
+ public String getRspEngineUrl() {
+ return rspEngineUrl;
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParser.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParser.java
new file mode 100644
index 0000000..e1dd4f7
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParser.java
@@ -0,0 +1,48 @@
+package be.ugent.idlab.divide.util.component;
+
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+
+public class ComponentEntryParser {
+
+ static void validateContextIris(List<String> contextIris)
+ throws ComponentEntryParserException {
+ for (String contextIri : contextIris) {
+ if (contextIri == null || contextIri.trim().isEmpty()) {
+ throw new ComponentEntryParserException(
+ "Component entry contains empty context IRIs");
+ }
+ }
+ }
+
+ static RspQueryLanguage parseRspEngineQueryLanguage(String input)
+ throws ComponentEntryParserException {
+ RspQueryLanguage rspQueryLanguage = RspQueryLanguage.fromString(input.trim());
+ if (rspQueryLanguage == null) {
+ throw new ComponentEntryParserException(String.format(
+ "Component entry contains invalid/unsupported RSP query language '%s'",
+ input));
+ }
+ return rspQueryLanguage;
+ }
+
+ static void validateRspEngineUrl(String rspEngineUrl)
+ throws ComponentEntryParserException {
+ try {
+ URL url = new URL(rspEngineUrl);
+ if (!url.getProtocol().equals("http") && !url.getProtocol().equals("https")) {
+ throw new ComponentEntryParserException(String.format(
+ "Component entry contains non HTTP(S) RSP engine URL '%s'",
+ rspEngineUrl));
+ }
+ } catch (MalformedURLException e) {
+ throw new ComponentEntryParserException(String.format(
+ "Component entry contains invalid RSP engine URL '%s'",
+ rspEngineUrl));
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParserException.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParserException.java
new file mode 100644
index 0000000..e1fe029
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParserException.java
@@ -0,0 +1,18 @@
+package be.ugent.idlab.divide.util.component;
+
+@SuppressWarnings("unused")
+public class ComponentEntryParserException extends Exception {
+
+ public ComponentEntryParserException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public ComponentEntryParserException(String description) {
+ super(description);
+ }
+
+ public ComponentEntryParserException(Exception base) {
+ super(base);
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/CsvComponentEntryParser.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/CsvComponentEntryParser.java
new file mode 100644
index 0000000..3f8efb5
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/CsvComponentEntryParser.java
@@ -0,0 +1,95 @@
+package be.ugent.idlab.divide.util.component;
+
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+import be.ugent.idlab.util.io.IOUtilities;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Class used for parsing configuration of component entries.
+ * These configurations can be present in a CSV file (e.g. used during start-up
+ * of a DIVIDE server with a known set of components) or in the HTTP body of
+ * a component creation request to the DIVIDE API.
+ */
+public class CsvComponentEntryParser {
+
+ private static final String DELIMITER = ";";
+
+ /**
+ * Parses a CSV file containing DIVIDE component entries on each line.
+ * A single line uses the delimiter ';' to split the different elements
+ * of the configuration entry.
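+ * <p>
+ * For illustration, a single (purely hypothetical) line could look like:
+ * <pre>
+ * http://localhost:3030/divide/patient-1;[http://localhost:3030/divide/ward-1];rsp-ql;http://10.0.0.5:8175/rsp
+ * </pre>
+ * containing, in order: the main context IRI, the (possibly empty) list of
+ * additional context IRIs, the RSP query language, and the RSP engine URL.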
+ *
+ * @param csvFile path to CSV file containing component configurations
+ * @return a list of parsed component entries of which the gettable fields can
+ * directly be used as input for the registration of components in a
+ * DIVIDE engine using the {@link IDivideEngine#registerComponent(
+ * List, RspQueryLanguage, String)} method
+ * @throws ComponentEntryParserException if a component configuration in the CSV file is
+ * invalid (invalid list of additional context IRIs,
+ * invalid RSP engine URL, or invalid
+ * RSP query language)
+ * @throws IllegalArgumentException if CSV file does not exist or is empty
+ */
+ public static List<ComponentEntry> parseComponentEntryFile(String csvFile)
+ throws ComponentEntryParserException {
+ List<ComponentEntry> componentEntries = new ArrayList<>();
+
+ // read CSV file
+ List<String[]> componentEntryStrings = IOUtilities.readCsvFile(csvFile, DELIMITER);
+ if (componentEntryStrings.isEmpty()) {
+ throw new IllegalArgumentException("CSV file does not exist or is empty");
+ }
+
+ // parse component entries in CSV file
+ for (String[] componentEntryString : componentEntryStrings) {
+ componentEntries.add(parseComponentEntry(componentEntryString));
+ }
+
+ return componentEntries;
+ }
+
+ private static ComponentEntry parseComponentEntry(String[] entry)
+ throws ComponentEntryParserException {
+ if (entry.length == 4) {
+ // retrieve main context IRI
+ String mainContextIri = entry[0].trim();
+
+ // convert array string (of the form '[]' or '[iri1, iri2, ...]') to an
+ // actual list of additional context IRIs
+ if (!entry[1].trim().matches("\\[[^\\[\\]]*]")) {
+ throw new ComponentEntryParserException(
+ "Component entry contains invalid list of additional IRIs");
+ }
+ List<String> contextIris = new ArrayList<>();
+ contextIris.add(mainContextIri);
+ if (!entry[1].replace(" ", "").replace("\t", "").trim().equals("[]")) {
+ contextIris.addAll(
+ Arrays.stream(entry[1].replace("[", "").replace("]", "").split(","))
+ .map(String::trim)
+ .collect(Collectors.toList()));
+ }
+ ComponentEntryParser.validateContextIris(contextIris);
+
+ // parse RSP query language
+ RspQueryLanguage rspQueryLanguage =
+ ComponentEntryParser.parseRspEngineQueryLanguage(entry[2]);
+
+ // parse RSP engine URL
+ String rspEngineUrl = entry[3].trim();
+ ComponentEntryParser.validateRspEngineUrl(rspEngineUrl);
+
+ // if no errors, then return new component entry
+ return new ComponentEntry(
+ contextIris, rspQueryLanguage, rspEngineUrl);
+
+ } else {
+ throw new ComponentEntryParserException(
+ "Component entry does not contain 4 elements");
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntry.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntry.java
new file mode 100644
index 0000000..24edb75
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntry.java
@@ -0,0 +1,67 @@
+package be.ugent.idlab.divide.util.component;
+
+import java.util.List;
+
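+/**
+ * POJO mirroring the JSON structure of a component entry; instantiated via
+ * Gson deserialization in {@link JsonComponentEntryParser}.
+ */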
+@SuppressWarnings("unused")
+public class JsonComponentEntry {
+
+ private List<String> contextIris;
+ private RspEngineEntry rspEngine;
+
+ public JsonComponentEntry(List<String> contextIris, RspEngineEntry rspEngine) {
+ this.contextIris = contextIris;
+ this.rspEngine = rspEngine;
+ }
+
+ public List<String> getContextIris() {
+ return contextIris;
+ }
+
+ public void setContextIris(List<String> contextIris) {
+ this.contextIris = contextIris;
+ }
+
+ public RspEngineEntry getRspEngine() {
+ return rspEngine;
+ }
+
+ public void setRspEngine(RspEngineEntry rspEngine) {
+ this.rspEngine = rspEngine;
+ }
+
+ public boolean validateIfNonNull() {
+ return contextIris != null &&
+ rspEngine != null &&
+ rspEngine.queryLanguage != null &&
+ rspEngine.url != null;
+ }
+
+ static class RspEngineEntry {
+
+ public RspEngineEntry(String queryLanguage, String url) {
+ this.queryLanguage = queryLanguage;
+ this.url = url;
+ }
+
+ private String queryLanguage;
+ private String url;
+
+ public String getQueryLanguage() {
+ return queryLanguage;
+ }
+
+ public void setQueryLanguage(String queryLanguage) {
+ this.queryLanguage = queryLanguage;
+ }
+
+ public String getUrl() {
+ return url;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntryParser.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntryParser.java
new file mode 100644
index 0000000..3e60d92
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntryParser.java
@@ -0,0 +1,71 @@
+package be.ugent.idlab.divide.util.component;
+
+import be.ugent.idlab.divide.core.engine.IDivideEngine;
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Class used for parsing the configuration of component entries that are
+ * specified in JSON format, e.g. in the HTTP body of a component creation
+ * request to the DIVIDE API.
+ */
+public class JsonComponentEntryParser {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+
+ /**
+ * Parses a DIVIDE component entry, which is specified in JSON format.
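+ * <p>
+ * The expected JSON structure mirrors the fields of {@link JsonComponentEntry};
+ * a purely hypothetical example body:
+ * <pre>
+ * {
+ *   "contextIris": ["http://localhost:3030/divide/patient-1"],
+ *   "rspEngine": {
+ *     "queryLanguage": "rsp-ql",
+ *     "url": "http://10.0.0.5:8175/rsp"
+ *   }
+ * }
+ * </pre>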
+ *
+ * @param json component configuration as a JSON string
+ * @return a parsed component entry of which the gettable fields can
+ * directly be used as input for the registration of components in a
+ * DIVIDE engine using the {@link IDivideEngine#registerComponent(
+ * List, RspQueryLanguage, String)} method
+ * @throws ComponentEntryParserException if the specified component configuration is
+ * not in the required JSON format
+ */
+ public static ComponentEntry parseComponentEntry(String json)
+ throws ComponentEntryParserException {
+ // parse json
+ JsonComponentEntry jsonComponentEntry =
+ GSON.fromJson(json, JsonComponentEntry.class);
+
+ // check if parsing succeeded and all required fields are non-null
+ if (jsonComponentEntry == null || !jsonComponentEntry.validateIfNonNull()) {
+ throw new ComponentEntryParserException("Not all required JSON fields are present");
+ }
+
+ return parseComponentEntry(jsonComponentEntry);
+ }
+
+ private static ComponentEntry parseComponentEntry(JsonComponentEntry jsonComponentEntry)
+ throws ComponentEntryParserException {
+ // parse context IRIs
+ List<String> contextIris = jsonComponentEntry.getContextIris().stream()
+ .map(String::trim)
+ .collect(Collectors.toList());
+ ComponentEntryParser.validateContextIris(contextIris);
+
+ // parse RSP query language
+ RspQueryLanguage rspQueryLanguage =
+ ComponentEntryParser.parseRspEngineQueryLanguage(
+ jsonComponentEntry.getRspEngine().getQueryLanguage());
+
+ // parse RSP engine registration URL
+ String rspEngineUrl = jsonComponentEntry.getRspEngine().getUrl();
+ ComponentEntryParser.validateRspEngineUrl(rspEngineUrl);
+
+ // if no errors, then return new component entry
+ return new ComponentEntry(
+ contextIris,
+ rspQueryLanguage,
+ rspEngineUrl);
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/ContextEnrichmentEntry.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/ContextEnrichmentEntry.java
new file mode 100644
index 0000000..99a6ebb
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/ContextEnrichmentEntry.java
@@ -0,0 +1,43 @@
+package be.ugent.idlab.divide.util.query;
+
+import java.util.ArrayList;
+import java.util.List;
+
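+/**
+ * POJO mirroring the optional "contextEnrichment" JSON object of a DIVIDE
+ * query registration request; instantiated via Gson deserialization in
+ * {@link DivideQueryEntryParser}. A purely hypothetical example:
+ * <pre>
+ * "contextEnrichment": {
+ *   "doReasoning": true,
+ *   "executeOnOntologyTriples": false,
+ *   "queries": ["CONSTRUCT { ... } WHERE { ... }"]
+ * }
+ * </pre>
+ */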
+@SuppressWarnings("unused")
+public class ContextEnrichmentEntry {
+
+ private boolean doReasoning;
+ private boolean executeOnOntologyTriples;
+ private List<String> queries;
+
+ public ContextEnrichmentEntry() {
+ this.doReasoning = true;
+ this.executeOnOntologyTriples = true;
+ this.queries = new ArrayList<>();
+ }
+
+ public boolean doReasoning() {
+ return doReasoning;
+ }
+
+ public void setDoReasoning(boolean doReasoning) {
+ this.doReasoning = doReasoning;
+ }
+
+ public boolean executeOnOntologyTriples() {
+ return executeOnOntologyTriples;
+ }
+
+ public void setExecuteOnOntologyTriples(boolean executeOnOntologyTriples) {
+ this.executeOnOntologyTriples = executeOnOntologyTriples;
+ }
+
+ public List<String> getQueries() {
+ return queries;
+ }
+
+ public void setQueries(List<String> queries) {
+ this.queries = queries;
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryInDivideFormat.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryInDivideFormat.java
new file mode 100644
index 0000000..0350323
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryInDivideFormat.java
@@ -0,0 +1,48 @@
+package be.ugent.idlab.divide.util.query;
+
+@SuppressWarnings("unused")
+public class DivideQueryEntryInDivideFormat {
+
+ private String queryPattern;
+ private String sensorQueryRule;
+ private String goal;
+
+ private ContextEnrichmentEntry contextEnrichment;
+
+ public DivideQueryEntryInDivideFormat() {
+ // empty on purpose
+ }
+
+ public String getQueryPattern() {
+ return queryPattern;
+ }
+
+ public void setQueryPattern(String queryPattern) {
+ this.queryPattern = queryPattern;
+ }
+
+ public String getSensorQueryRule() {
+ return sensorQueryRule;
+ }
+
+ public void setSensorQueryRule(String sensorQueryRule) {
+ this.sensorQueryRule = sensorQueryRule;
+ }
+
+ public String getGoal() {
+ return goal;
+ }
+
+ public void setGoal(String goal) {
+ this.goal = goal;
+ }
+
+ public ContextEnrichmentEntry getContextEnrichment() {
+ return contextEnrichment;
+ }
+
+ public void setContextEnrichment(ContextEnrichmentEntry contextEnrichmentEntry) {
+ this.contextEnrichment = contextEnrichmentEntry;
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryInQueryFormat.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryInQueryFormat.java
new file mode 100644
index 0000000..78324a1
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryInQueryFormat.java
@@ -0,0 +1,24 @@
+package be.ugent.idlab.divide.util.query;
+
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput;
+
+public class DivideQueryEntryInQueryFormat {
+
+ private final DivideQueryParserInput divideQueryParserInput;
+ private final ContextEnrichmentEntry contextEnrichmentEntry;
+
+ public DivideQueryEntryInQueryFormat(DivideQueryParserInput divideQueryParserInput,
+ ContextEnrichmentEntry contextEnrichmentEntry) {
+ this.divideQueryParserInput = divideQueryParserInput;
+ this.contextEnrichmentEntry = contextEnrichmentEntry;
+ }
+
+ public DivideQueryParserInput getDivideQueryParserInput() {
+ return divideQueryParserInput;
+ }
+
+ public ContextEnrichmentEntry getContextEnrichmentEntry() {
+ return contextEnrichmentEntry;
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParser.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParser.java
new file mode 100644
index 0000000..56926b2
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParser.java
@@ -0,0 +1,72 @@
+package be.ugent.idlab.divide.util.query;
+
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput;
+import be.ugent.idlab.divide.core.query.parser.InputQueryLanguage;
+import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.JsonSyntaxException;
+
+public class DivideQueryEntryParser {
+
+ private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();
+
+ public static DivideQueryEntryInQueryFormat parseSparqlEntryAsDivideQuery(String json)
+ throws InvalidDivideQueryParserInputException {
+ // parse DIVIDE query parser input
+ DivideQueryParserInput input;
+ try {
+ input = GSON.fromJson(json, DivideQueryParserInput.class);
+ input.setInputQueryLanguage(InputQueryLanguage.SPARQL);
+ } catch (JsonSyntaxException e) {
+ throw new InvalidDivideQueryParserInputException("Invalid JSON syntax", e);
+ }
+
+ // parse context enrichment entry
+ JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject();
+ ContextEnrichmentEntry contextEnrichmentEntry = jsonObject.get("contextEnrichment") != null ?
+ GSON.fromJson(jsonObject.get("contextEnrichment").toString(),
+ ContextEnrichmentEntry.class) : null;
+
+ return new DivideQueryEntryInQueryFormat(input, contextEnrichmentEntry);
+ }
+
+ public static DivideQueryEntryInQueryFormat parseRspQlEntryAsDivideQuery(String json)
+ throws InvalidDivideQueryParserInputException {
+ DivideQueryParserInput input;
+ try {
+ input = GSON.fromJson(json, DivideQueryParserInput.class);
+ input.setInputQueryLanguage(InputQueryLanguage.RSP_QL);
+ } catch (JsonSyntaxException e) {
+ throw new InvalidDivideQueryParserInputException("Invalid JSON syntax", e);
+ }
+
+ // parse context enrichment entry
+ JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject();
+ ContextEnrichmentEntry contextEnrichmentEntry = jsonObject.get("contextEnrichment") != null ?
+ GSON.fromJson(jsonObject.get("contextEnrichment").toString(),
+ ContextEnrichmentEntry.class) : null;
+
+ return new DivideQueryEntryInQueryFormat(input, contextEnrichmentEntry);
+ }
+
+ public static DivideQueryEntryInDivideFormat parseDivideQueryEntryInDivideFormat(String json)
+ throws DivideQueryEntryParserException {
+ // check if any json is given:
+ // if not, no query entry is defined -> return an empty entry
+ if (json == null || json.trim().isEmpty()) {
+ return new DivideQueryEntryInDivideFormat();
+ }
+
+ // parse json
+ try {
+ return GSON.fromJson(json, DivideQueryEntryInDivideFormat.class);
+ } catch (Exception e) {
+ throw new DivideQueryEntryParserException(
+ "DIVIDE query is not in expected JSON format", e);
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParserException.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParserException.java
new file mode 100644
index 0000000..d77b909
--- /dev/null
+++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParserException.java
@@ -0,0 +1,18 @@
+package be.ugent.idlab.divide.util.query;
+
+@SuppressWarnings("unused")
+public class DivideQueryEntryParserException extends Exception {
+
+ public DivideQueryEntryParserException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public DivideQueryEntryParserException(String description) {
+ super(description);
+ }
+
+ public DivideQueryEntryParserException(Exception base) {
+ super(base);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/pom.xml b/src/divide-central/divide-engine/pom.xml
new file mode 100644
index 0000000..67e8220
--- /dev/null
+++ b/src/divide-central/divide-engine/pom.xml
@@ -0,0 +1,72 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>divide</artifactId>
+        <groupId>be.ugent.idlab</groupId>
+        <version>1.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>divide-engine</artifactId>
+
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>knowledge-base-common</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>knowledge-base-jena3</artifactId>
+            <version>2.0</version>
+        </dependency>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>knowledge-base-api</artifactId>
+            <version>1.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>commons-configuration</groupId>
+            <artifactId>commons-configuration</artifactId>
+            <version>1.10</version>
+        </dependency>
+
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>rdf-utilities</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>rdf-utilities-jena3-owlapi4</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>jena-rule-utilities</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>eye-utilities</artifactId>
+            <version>1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>be.ugent.idlab</groupId>
+            <artifactId>http-utilities</artifactId>
+            <version>1.0</version>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/Component.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/Component.java
new file mode 100644
index 0000000..6166d77
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/Component.java
@@ -0,0 +1,59 @@
+package be.ugent.idlab.divide.core.component;
+
+import be.ugent.idlab.divide.core.context.IContextEnricher;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.rsp.IRspEngineHandler;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class Component implements IComponent {
+
+ private final String id;
+ private final IRspEngineHandler rspEngineHandler;
+ private final List<String> contextIris;
+ private final Map<String, IContextEnricher> contextEnricherMap;
+
+ Component(String id,
+ IRspEngineHandler rspEngineHandler,
+ List<String> contextIris) {
+ this.id = id;
+ this.rspEngineHandler = rspEngineHandler;
+ this.contextIris = new ArrayList<>(contextIris);
+ this.contextEnricherMap = new HashMap<>();
+ }
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public IRspEngineHandler getRspEngineHandler() {
+ return rspEngineHandler;
+ }
+
+ @Override
+ public List<String> getContextIris() {
+ return contextIris;
+ }
+
+ @Override
+ public synchronized void registerContextEnricher(IDivideQuery divideQuery,
+ IContextEnricher contextEnricher) {
+ contextEnricherMap.put(divideQuery.getName(), contextEnricher);
+ }
+
+ @Override
+ public synchronized void unregisterContextEnricher(IDivideQuery divideQuery) {
+ contextEnricherMap.remove(divideQuery.getName());
+ }
+
+ @Override
+ public synchronized IContextEnricher getContextEnricher(IDivideQuery divideQuery) {
+ return contextEnricherMap.get(divideQuery.getName());
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/ComponentFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/ComponentFactory.java
new file mode 100644
index 0000000..55743fe
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/ComponentFactory.java
@@ -0,0 +1,70 @@
+package be.ugent.idlab.divide.core.component;
+
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.rsp.IRspEngineHandler;
+import be.ugent.idlab.divide.rsp.RspEngineHandlerFactory;
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+@SuppressWarnings({"UnusedReturnValue", "unused"})
+public class ComponentFactory {
+
+ private static final Logger LOGGER =
+ LoggerFactory.getLogger(ComponentFactory.class.getName());
+
+ /**
+ * Creates an {@link IComponent} instance with the given inputs.
+ *
+ * @param contextIris IRIs of the ABoxes in a knowledge base that represent the relevant
+ * context associated to the new {@link IComponent}
+ * @param rspQueryLanguage RSP query language used by the RSP engine running on
+ * the created component
+ * @param rspEngineUrl URL which should be used for communication with the RSP engine
+ * running on the created component, and which will also be mapped
+ * to a unique ID for the created component
+ * @return the new {@link IComponent}
+ * @throws DivideInvalidInputException if the RSP engine URL is not a valid URL
+ */
+ public static IComponent createInstance(List<String> contextIris,
+ RspQueryLanguage rspQueryLanguage,
+ String rspEngineUrl)
+ throws DivideInvalidInputException {
+ // create a handler for the RSP engine running on the new component
+ // (this includes a validation of the URL to communicate with the engine later on)
+ IRspEngineHandler rspEngine = RspEngineHandlerFactory.createInstance(
+ rspQueryLanguage, rspEngineUrl);
+
+ // update RSP engine URL to validated & preprocessed URL
+ rspEngineUrl = rspEngine.getRspEngine().getBaseUrl();
+
+ // create a unique ID which is a modified version of the RSP engine
+ // URL that is file system friendly (i.e., that can be
+ // used in file names and directory names)
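+ // (e.g., a hypothetical URL "http://10.0.0.5:8175/engine" is mapped to the
+ // ID "10.0.0.5-8175-2Fengine": the path is URL-encoded to "%2Fengine",
+ // after which the '%' signs are stripped)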
+ String id;
+ try {
+ URL url = new URL(rspEngineUrl);
+ id = String.format("%s-%d-%s",
+ url.getHost(),
+ url.getPort() != -1 ? url.getPort() : 80,
+ URLEncoder.encode(url.getPath(), StandardCharsets.UTF_8.toString()).
+ replaceAll("%", ""));
+ } catch (MalformedURLException | UnsupportedEncodingException e) {
+ // should never occur since the URL has been validated when creating the
+ // IRspEngineHandler above
+ LOGGER.error("The created component is null, so an unknown input validation " +
+ "error has occurred");
+ throw new DivideInvalidInputException("An unknown input validation error has occurred", e);
+ }
+
+ return new Component(id, rspEngine, contextIris);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/IComponent.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/IComponent.java
new file mode 100644
index 0000000..fd967b6
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/component/IComponent.java
@@ -0,0 +1,56 @@
+package be.ugent.idlab.divide.core.component;
+
+import be.ugent.idlab.divide.core.context.IContextEnricher;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.rsp.IRspEngineHandler;
+
+import java.util.List;
+
+/**
+ * Representation of a DIVIDE component.
+ * It has an ID, a list of context IRIs, and an {@link IRspEngineHandler}.
+ * The list of context IRIs contains all ABox IRIs in the knowledge base that
+ * represent the relevant context of this component, i.e., when updates to the ABox
+ * associated to any of its context IRIs occur, the DIVIDE query derivation of the
+ * associated {@link IRspEngineHandler} should be triggered.
+ */
+public interface IComponent {
+
+ /**
+ * Retrieves the ID of this {@link IComponent}.
+ * This ID is unique, as it is based on the registration URL
+ * of the RSP engine running on this component.
+ *
+ * @return the ID of this {@link IComponent}
+ */
+ String getId();
+
+ /**
+ * Retrieves the different context IRIs of this {@link IComponent}. This is a list
+ * of all ABox IRIs in the knowledge base that represent the relevant context of
+ * this component, i.e., when updates to the ABox associated to any of these context
+ * IRIs occurs, the DIVIDE query derivation of the associated {@link IRspEngineHandler}
+ * should be triggered.
+ *
+ * @return the different context IRIs of this {@link IComponent}
+ */
+ List<String> getContextIris();
+
+ /**
+ * Retrieves the {@link IRspEngineHandler} of this component that manages the
+ * RSP engine running on this component. Concretely, it handles the queries
+ * registered to this engine, to ensure that the relevant queries are being
+ * executed by this RSP engine at all times.
+ *
+ * @return the {@link IRspEngineHandler} of this component that manages the
+ * RSP engine running on this component
+ */
+ IRspEngineHandler getRspEngineHandler();
+
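+ /**
+ * Registers the given context enricher for the given DIVIDE query on this
+ * component, replacing any context enricher registered for that query before.
+ */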
+ void registerContextEnricher(IDivideQuery divideQuery, IContextEnricher contextEnricher);
+
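+ /**
+ * Removes the context enricher registered for the given DIVIDE query,
+ * if any exists.
+ */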
+ void unregisterContextEnricher(IDivideQuery divideQuery);
+
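+ /**
+ * Retrieves the context enricher registered for the given DIVIDE query,
+ * or null if none is registered.
+ */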
+ IContextEnricher getContextEnricher(IDivideQuery divideQuery);
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/Context.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/Context.java
new file mode 100644
index 0000000..8ca5ef7
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/Context.java
@@ -0,0 +1,54 @@
+package be.ugent.idlab.divide.core.context;
+
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.rdf.model.ModelFactory;
+
+import java.util.UUID;
+
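+/**
+ * Wrapper around a Jena {@link Model} that represents the context of a DIVIDE
+ * component at a given point in time. Every context has a unique ID, and can
+ * be enriched at most once via {@link #enrichContext(Model)}.
+ */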
+public class Context {
+
+ private final String id;
+ private Model context;
+
+ private boolean enriched;
+
+ public Context(Model context) {
+ this.id = UUID.randomUUID().toString();
+ this.context = context;
+ this.enriched = false;
+ }
+
+ Context(String id, Model context) {
+ this.id = id;
+ this.context = context;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public Model getContext() {
+ return context;
+ }
+
+ public void enrichContext(Model context) {
+ if (!enriched) {
+ this.context = context;
+ enriched = true;
+ } else {
+ throw new RuntimeException(String.format(
+ "Context with ID '%s' has already been enriched", id));
+ }
+ }
+
+ public long size() {
+ return context.size();
+ }
+
+ public Context copy() {
+ Model newModel = ModelFactory.createDefaultModel();
+ newModel.add(context.listStatements());
+ return new Context(newModel);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricher.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricher.java
new file mode 100644
index 0000000..02290f7
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricher.java
@@ -0,0 +1,264 @@
+package be.ugent.idlab.divide.core.context;
+
+import be.ugent.idlab.divide.core.engine.DivideOntology;
+import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaUtilities;
+import org.apache.jena.query.QueryExecution;
+import org.apache.jena.query.QueryExecutionFactory;
+import org.apache.jena.rdf.model.InfModel;
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.rdf.model.ModelFactory;
+import org.apache.jena.reasoner.rulesys.GenericRuleReasoner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
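+/**
+ * {@link IContextEnricher} implementation that enriches the context of a
+ * component by executing a list of SPARQL CONSTRUCT queries on it; depending
+ * on the {@link ContextEnricherMode}, the queries are executed on the context
+ * only or also on the ontology triples, with or without reasoning.
+ */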
+public class ContextEnricher implements IContextEnricher {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ContextEnricher.class.getName());
+
+ private final List<ContextEnrichingQuery> queries;
+ private final ContextEnricherMode mode;
+ private final String componentId;
+
+ private DivideOntology registeredOntology;
+ private Model baseModel;
+
+ public ContextEnricher(List<ContextEnrichingQuery> queries,
+ ContextEnricherMode mode,
+ String componentId) {
+ this.queries = queries;
+ this.mode = mode;
+ this.componentId = componentId;
+
+ this.registeredOntology = null;
+ this.baseModel = ModelFactory.createDefaultModel();
+ }
+
+ @Override
+ public synchronized void registerOntology(DivideOntology ontology) {
+ LOGGER.info("Registering ontology with ID {} to context enricher of component {} with mode {}",
+ ontology.getId(), componentId, mode);
+
+ // check if currently registered ontology exists and has the same ID
+ // as the new ontology
+ if (this.registeredOntology != null &&
+ this.registeredOntology.getId().equals(ontology.getId())) {
+ // -> if yes, then no action should be taken anymore
+ return;
+ }
+
+ // update saved ontology to the new ontology
+ this.registeredOntology = ontology;
+
+ if (this.queries.isEmpty()) {
+ // if no queries are registered for context enrichment, then there is
+ // no need to do the ontology registration process
+ return;
+ }
+
+ if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING ||
+ this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING) {
+ // when only executing the queries on the context, nothing should be
+ // done with the triples of the registered ontology
+ // -> only if reasoning is still done, the rules need to be parsed
+ if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING) {
+ GenericRuleReasoner reasoner = new GenericRuleReasoner(ontology.getRules());
+ // RETE algorithm is required to ensure fast incremental reasoning
+ // -> downside: for some reason, the output of reasoning with this model leads to
+ // duplicate triples in the inferred model (not always the same number)
+ reasoner.setMode(GenericRuleReasoner.FORWARD_RETE);
+
+ // already perform reasoning on the model with the ontology triples
+ LOGGER.info("Start preparing reasoning during context enrichment with rule reasoner " +
+ "in context enricher of component {}", componentId);
+ long start = System.currentTimeMillis();
+ InfModel infModel = ModelFactory.createInfModel(
+ reasoner, ModelFactory.createDefaultModel());
+ infModel.prepare();
+ LOGGER.debug("Finished preparing reasoning during context enrichment with rule reasoner " +
+ "in context enricher of component {} in {} ms",
+ componentId, System.currentTimeMillis() - start);
+
+ // set resulting model as base model for context enrichment
+ this.baseModel = infModel;
+ }
+
+ return;
+ }
+
+ // IF THIS PART IS REACHED: queries definitely need to be executed
+ // on the ontology triples as well
+ // -> add ontology triples to a new Jena model
+ Model model = ModelFactory.createDefaultModel();
+ model.add(ontology.getModel());
+
+ if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING) {
+ // set base model for context enrichment to clean model
+ // with only the ontology triples
+ this.baseModel = model;
+
+ } else if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING) {
+ // convert OWL ontology to a set of Jena rules
+ // (i.e., extract all OWL 2 RL axioms from ontology and convert into rules that
+ // Jena understands)
+ // -> create a Jena rule reasoner that uses these rules for reasoning
+ LOGGER.info("Create Jena rule reasoner with rules extracted from ontology " +
+ "in context enricher of component {}", componentId);
+ GenericRuleReasoner reasoner = new GenericRuleReasoner(ontology.getRules());
+ // RETE algorithm is required to ensure fast incremental reasoning
+ // -> downside: for some reason, the output of reasoning with this model leads to
+ // duplicate triples in the inferred model (not always the same number)
+ reasoner.setMode(GenericRuleReasoner.FORWARD_RETE);
+
+ // already perform reasoning on the model with the ontology triples
+ LOGGER.info("Start preparing reasoning during context enrichment with rule reasoner " +
+ "in context enricher of component {}", componentId);
+ long start = System.currentTimeMillis();
+ InfModel infModel = ModelFactory.createInfModel(reasoner, model);
+ infModel.prepare();
+ LOGGER.debug("Finished preparing reasoning during context enrichment with rule reasoner " +
+ "in context enricher of component {} in {} ms",
+ componentId, System.currentTimeMillis() - start);
+
+ // set resulting model as base model for context enrichment
+ this.baseModel = infModel;
+ }
+ }
+
+ @Override
+ public synchronized void enrichContext(Context context) {
+ long start, end;
+
+ if (queries.isEmpty()) {
+ // if no queries are registered, then obviously no context
+ // enrichment needs to take place
+ LOGGER.info("No queries to enrich context {} for component {}",
+ context.getId(), componentId);
+
+ return;
+ }
+
+ LOGGER.info("Enriching context {} for component {}: starting with context of {} triples",
+ context.getId(), componentId, context.size());
+
+ // create model for resulting context and add base context
+ Model result = ModelFactory.createDefaultModel();
+ result.add(context.getContext());
+
+ // add context data to model to execute queries
+ start = System.currentTimeMillis();
+ this.baseModel.add(context.getContext());
+ end = System.currentTimeMillis();
+ LOGGER.info("Enriching context {} for component {}: added {} context triples " +
+ "to base model (now containing {} triples) in {} ms",
+ context.getId(), componentId, context.size(), baseModel.size(), end - start);
+
+ // create model to remove at the end from the base model, and add base context
+ Model toBeRemoved = ModelFactory.createDefaultModel();
+ toBeRemoved.add(context.getContext());
+
+ // loop over all queries in order
+ for (int i = 0; i < queries.size(); i++) {
+ ContextEnrichingQuery query = queries.get(i);
+
+ // save model to execute query on
+ Model queryModel;
+ // -> in the reasoning case, a new query model will be constructed
+ // to remove any duplicates that have been created by the FORWARD_RETE
+ // rule reasoning
+ // -> the number of duplicate triples is not deterministic, but (luckily)
+ // the number of unique triples is deterministic!
+ // -> so these duplicates need to be removed for the queries
+ if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING ||
+ this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING) {
+ start = System.currentTimeMillis();
+
+ // start with empty query model
+ queryModel = ModelFactory.createDefaultModel();
+
+ // retrieve the two parts of the inferred model:
+ // - the raw model (triples after adding context but before doing incremental
+ // reasoning on new model with added context triples)
+ // - the deductions model (triples inferred from doing the incremental reasoning
+ // on new model with added context triples)
+ InfModel inferredBaseModel = (InfModel) this.baseModel;
+ Model rawModel = inferredBaseModel.getRawModel();
+ Model deductionsModel = inferredBaseModel.getDeductionsModel();
+
+ // find duplicates, i.e., triples in deductions model that were already present
+ // in raw model
+ Model duplicates = deductionsModel.intersection(rawModel);
+
+ // create new version of deductions model without the duplicate triples
+ Model nonDuplicateDeductionsModel = ModelFactory.createDefaultModel();
+ nonDuplicateDeductionsModel.add(deductionsModel);
+ nonDuplicateDeductionsModel.remove(duplicates);
+
+ // create query model from original raw model, and deductions model
+ // without the duplicate triples
+ queryModel.add(rawModel);
+ queryModel.add(nonDuplicateDeductionsModel);
+
+ end = System.currentTimeMillis();
+ LOGGER.info("Enriching context {} for component {}: removing {} duplicates in " +
+ "{} ms to construct query model with {} triples",
+ context.getId(), componentId,
+ duplicates.size(), end - start, queryModel.size());
+
+ } else {
+ // -> in non-reasoning cases, this will be the base model
+ queryModel = ModelFactory.createDefaultModel();
+ queryModel.add(this.baseModel);
+ }
+
+ start = System.currentTimeMillis();
+ try (QueryExecution queryExecution =
+ QueryExecutionFactory.create(query.getQuery(), queryModel)) {
+ // execute query on query model
+ Model queryResult = queryExecution.execConstruct();
+ end = System.currentTimeMillis();
+
+ LOGGER.info("Enriching context {} for component {}: executed query {} in " +
+ "{} ms to yield {} additional context triples",
+ context.getId(), componentId, query.getName(),
+ end - start, queryResult.size());
+ if (!queryResult.isEmpty()) {
+ JenaUtilities.printModel(queryResult);
+ }
+
+ // add resulting triples to context
+ result.add(queryResult);
+
+ // add resulting triples to base model to ensure dependent queries work
+ // (only if another query follows of course)
+ if (i != queries.size() - 1) {
+ LOGGER.info("Temporarily add {} additional context triples resulting from " +
+ "query {} to base model for execution of following query",
+ queryResult.size(), query.getName());
+ this.baseModel.add(queryResult);
+ toBeRemoved.add(queryResult);
+ }
+
+ } catch (Exception e) {
+ LOGGER.error("Error during the execution of query {} in context " +
+ "enricher of context {} for component {}",
+ query.getName(), context.getId(), componentId, e);
+
+ // if anything goes wrong during the context enrichment, the original
+ // context is returned instead of a partially enriched version
+ return;
+ }
+ }
+
+ // again remove all context data from the model
+ start = System.currentTimeMillis();
+ this.baseModel.remove(toBeRemoved);
+ end = System.currentTimeMillis();
+ LOGGER.info("Enriching context {} for component {}: removed context triples " +
+ "from base model in {} ms", context.getId(), componentId, end - start);
+
+ // update enriched context
+ context.enrichContext(result);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherFactory.java
new file mode 100644
index 0000000..69b4070
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherFactory.java
@@ -0,0 +1,26 @@
+package be.ugent.idlab.divide.core.context;
+
+public class ContextEnricherFactory {
+
+ /**
+ * Create and return a new DIVIDE context enricher.
+ *
+ * @param contextEnrichment definition of the context enrichment (mode and
+ * context-enriching queries) for the new context enricher
+ * @param componentId ID of the DIVIDE component for which the context
+ * enricher is created
+ * @return newly created DIVIDE context enricher
+ */
+ public static synchronized IContextEnricher createInstance(ContextEnrichment contextEnrichment,
+ String componentId) {
+ // only create a context enricher with actual logic, if context enriching queries
+ // are defined in the given context enrichment
+ if (contextEnrichment == null ||
+ contextEnrichment.getQueries() == null ||
+ contextEnrichment.getQueries().isEmpty()) {
+ return new DummyContextEnricher();
+ } else {
+ return new ContextEnricher(
+ contextEnrichment.getQueries(),
+ contextEnrichment.getMode(),
+ componentId);
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherMode.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherMode.java
new file mode 100644
index 0000000..2b836ce
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherMode.java
@@ -0,0 +1,26 @@
+package be.ugent.idlab.divide.core.context;
+
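+/**
+ * Mode of a {@link ContextEnricher}, defining whether the context-enriching
+ * queries are executed on the context triples only or also on the loaded
+ * ontology triples, and whether reasoning is performed beforehand.
+ */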
+public enum ContextEnricherMode {
+
+ EXECUTE_ON_CONTEXT_WITHOUT_REASONING(false, false),
+ EXECUTE_ON_CONTEXT_WITH_REASONING(false, true),
+ EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING(true, false),
+ EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING(true, true);
+
+ private final boolean loadOntology;
+ private final boolean performReasoning;
+
+ ContextEnricherMode(boolean loadOntology, boolean performReasoning) {
+ this.loadOntology = loadOntology;
+ this.performReasoning = performReasoning;
+ }
+
+ public boolean loadOntology() {
+ return loadOntology;
+ }
+
+ public boolean performReasoning() {
+ return performReasoning;
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnrichingQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnrichingQuery.java
new file mode 100644
index 0000000..afd742d
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnrichingQuery.java
@@ -0,0 +1,26 @@
+package be.ugent.idlab.divide.core.context;
+
+public class ContextEnrichingQuery {
+
+ private final String name;
+ private final String query;
+
+ ContextEnrichingQuery(String name, String query) {
+ this.name = name;
+ this.query = query;
+ }
+
+ ContextEnrichingQuery(int order, String query) {
+ this.name = String.format("query-%d", order);
+ this.query = query;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getQuery() {
+ return query;
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnrichment.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnrichment.java
new file mode 100644
index 0000000..63318ea
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnrichment.java
@@ -0,0 +1,56 @@
+package be.ugent.idlab.divide.core.context;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+public class ContextEnrichment {
+
+ private final ContextEnricherMode mode;
+ private final List<ContextEnrichingQuery> queries;
+
+ public ContextEnrichment() {
+ // default constructor when no context enrichment is available
+ this.mode = ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING;
+ this.queries = new ArrayList<>();
+ }
+
+ public ContextEnrichment(boolean doReasoning,
+ boolean executeWithOntologyTriples,
+ List<String> queries) {
+ // set list of queries
+ this.queries = IntStream.range(0, queries.size())
+ .mapToObj(i -> new ContextEnrichingQuery(i, queries.get(i)))
+ .filter(query -> query.getQuery() != null && !query.getQuery().trim().isEmpty())
+ .collect(Collectors.toList());
+
+ // set correct mode
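+ // mapping of (executeWithOntologyTriples, doReasoning) to mode:
+ // (false, false) -> EXECUTE_ON_CONTEXT_WITHOUT_REASONING
+ // (false, true) -> EXECUTE_ON_CONTEXT_WITH_REASONING
+ // (true, false) -> EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING
+ // (true, true) -> EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING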
+ if (this.queries.isEmpty()) {
+ this.mode = ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING;
+ } else {
+ this.mode = executeWithOntologyTriples ?
+ (doReasoning ? ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING :
+ ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING) :
+ (doReasoning ? ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING :
+ ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING);
+ }
+ }
+
+ public ContextEnricherMode getMode() {
+ return mode;
+ }
+
+ public List<ContextEnrichingQuery> getQueries() {
+ return queries;
+ }
+
+ @Override
+ public String toString() {
+ return "ContextEnrichment{" +
+ "mode=" + mode +
+ ", queries=" + queries +
+ '}';
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/DummyContextEnricher.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/DummyContextEnricher.java
new file mode 100644
index 0000000..04d5ed1
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/DummyContextEnricher.java
@@ -0,0 +1,17 @@
+package be.ugent.idlab.divide.core.context;
+
+import be.ugent.idlab.divide.core.engine.DivideOntology;
+
+public class DummyContextEnricher implements IContextEnricher {
+
+ @Override
+ public void registerOntology(DivideOntology ontology) {
+ // do nothing - empty on purpose
+ }
+
+ @Override
+ public void enrichContext(Context context) {
+ // do nothing - empty on purpose
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/IContextEnricher.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/IContextEnricher.java
new file mode 100644
index 0000000..cc206b4
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/IContextEnricher.java
@@ -0,0 +1,11 @@
+package be.ugent.idlab.divide.core.context;
+
+import be.ugent.idlab.divide.core.engine.DivideOntology;
+
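+/**
+ * Enricher of the {@link Context} of a DIVIDE component, which executes a set
+ * of context-enriching queries on the context, possibly taking into account a
+ * registered {@link DivideOntology}.
+ */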
+public interface IContextEnricher {
+
+ void registerOntology(DivideOntology ontology);
+
+ void enrichContext(Context context);
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideComponentManager.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideComponentManager.java
new file mode 100644
index 0000000..0783c1f
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideComponentManager.java
@@ -0,0 +1,268 @@
+package be.ugent.idlab.divide.core.engine;
+
+import be.ugent.idlab.divide.core.component.ComponentFactory;
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.context.Context;
+import be.ugent.idlab.divide.core.exception.DivideInitializationException;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+import be.ugent.idlab.divide.util.LogConstants;
+import be.ugent.idlab.kb.IIriResolver;
+import be.ugent.idlab.kb.IKnowledgeBase;
+import be.ugent.idlab.kb.IKnowledgeBaseObserver;
+import be.ugent.idlab.kb.exception.InvalidIriException;
+import be.ugent.idlab.kb.exception.KnowledgeBaseOperationException;
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.rdf.model.ModelFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class DivideComponentManager implements IKnowledgeBaseObserver {
+
+ private static final Logger LOGGER =
+ LoggerFactory.getLogger(DivideComponentManager.class.getName());
+
+ private final DivideEngine divideEngine;
+
+ private final IKnowledgeBase knowledgeBase;
+
+ private final Map<String, IComponent> registeredComponents;
+
+ /**
+ * Map which keeps track of the components observing each context IRI,
+ * i.e., for each context IRI, it keeps the components whose context is
+ * partly defined by the ABox with that IRI in the knowledge base. If a
+ * change to a specific context IRI is observed, the query derivation
+ * should be triggered for each component in the associated list.
+ */
+ private final Map<String, List<IComponent>> contextIriObservers;
+
+ /**
+ * Boolean representing whether RSP engine streams on a component should be paused
+ * when context changes are detected that trigger the DIVIDE query derivation for
+ * that component.
+ */
+ private final boolean pauseRspEngineStreamsOnContextChanges;
+
+ /**
+ * Creates a new instance of a {@link DivideComponentManager} associated
+ * to the given {@link DivideEngine} and {@link IKnowledgeBase}.
+ *
+ * @param divideEngine DIVIDE engine for which the new instance should manage components,
+ * and which will perform the query derivation if the new component
+ * manager observes changes in the context associated to a component
+ * @param knowledgeBase knowledge base that should be used to observe any changes
+ * to the context of the managed components
+ * @param pauseRspEngineStreamsOnContextChanges boolean representing whether RSP engine
+ * streams on a component should be paused
+ * when context changes are detected that
+ * trigger the DIVIDE query derivation for
+ * that component
+ */
+ DivideComponentManager(DivideEngine divideEngine,
+ IKnowledgeBase knowledgeBase,
+ boolean pauseRspEngineStreamsOnContextChanges) {
+ this.divideEngine = divideEngine;
+ this.knowledgeBase = knowledgeBase;
+
+ this.registeredComponents = new HashMap<>();
+ this.contextIriObservers = new HashMap<>();
+
+ this.knowledgeBase.registerObserver(this);
+
+ this.pauseRspEngineStreamsOnContextChanges = pauseRspEngineStreamsOnContextChanges;
+ }
+
+ synchronized IComponent registerComponent(List<String> contextIris,
+ RspQueryLanguage rspQueryLanguage,
+ String rspEngineUrl)
+ throws DivideInvalidInputException {
+ // resolve all context IRIs
+ List<String> resolvedContextIris = new ArrayList<>();
+ try {
+ IIriResolver iriResolver = knowledgeBase.getIriResolver();
+ for (String contextIri : contextIris) {
+ resolvedContextIris.add(iriResolver.resolveIri(contextIri));
+ }
+ } catch (InvalidIriException e) {
+ throw new DivideInvalidInputException("Invalid context IRI(s) which cannot be " +
+ "resolved by the DIVIDE knowledge base", e);
+ }
+
+ // create component
+ IComponent component = ComponentFactory.createInstance(
+ resolvedContextIris, rspQueryLanguage, rspEngineUrl);
+
+ // ensure component with that ID does not yet exist
+ if (registeredComponents.containsKey(component.getId())) {
+ LOGGER.warn("Trying to register component with already existing ID");
+ return null;
+ }
+
+ LOGGER.info("Registering component with ID '{}'", component.getId());
+
+ // keep track of component by ID
+ registeredComponents.put(component.getId(), component);
+
+ return component;
+ }
+
+ void addContextIriObserver(String contextIri, IComponent component) {
+ if (contextIriObservers.containsKey(contextIri)) {
+ contextIriObservers.get(contextIri).add(component);
+ } else {
+ List<IComponent> observers = new ArrayList<>();
+ observers.add(component);
+ contextIriObservers.put(contextIri, observers);
+ }
+ }
+
+ /**
+ * @return removed component if component with given ID exists and is removed
+ * from the list of registered components, null if no component with
+ * given ID exists
+ */
+ synchronized IComponent unregisterComponent(String componentId) {
+ IComponent component = registeredComponents.remove(componentId);
+ if (component != null) {
+ LOGGER.info("Unregistering component with ID '{}'", componentId);
+
+ // remove component as observer for its context IRIs
+ component.getContextIris().forEach(
+ s -> contextIriObservers.get(s).remove(component));
+ }
+
+ return component;
+ }
+
+ synchronized Collection<IComponent> getRegisteredComponents() {
+ return registeredComponents.values();
+ }
+
+ synchronized IComponent getRegisteredComponentById(String id) {
+ return registeredComponents.get(id);
+ }
+
+ Model getContextAssociatedToComponent(String id) {
+ IComponent component = registeredComponents.get(id);
+ if (component != null) {
+ try {
+ Model componentContext = ModelFactory.createDefaultModel();
+ for (String contextIri : component.getContextIris()) {
+ // get ABox associated to each context IRI
+ Model context = knowledgeBase.getABox(contextIri);
+
+ // add retrieved context to full context of this component
+ componentContext.add(context.listStatements());
+ }
+ return componentContext;
+
+ } catch (KnowledgeBaseOperationException e) {
+ // if an error occurs when retrieving the knowledge base context
+ // for a given component, the context is incomplete and therefore
+ // considered non-existing
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Error occurred when retrieving current context of" +
+ " component with ID {}",
+ component.getId(), e);
+
+ return null;
+ }
+
+ } else {
+ return null;
+ }
+ }
+
+ @Override
+ public synchronized void notifyABoxUpdated(String iri, Model model) {
+ // check if queries need to be updated for components
+ // (is the case for components observing this iri)
+ boolean updateQueries = contextIriObservers.containsKey(iri) &&
+ !contextIriObservers.get(iri).isEmpty();
+
+ if (updateQueries) {
+ LOGGER.info("Receiving knowledge base update for ABox with IRI '{}'", iri);
+
+ // as soon as a context change is detected, the RSP engine streams should be
+ // paused until further notice (i.e., until the query registration finishes
+ // at some point and restarts them again)
+ if (pauseRspEngineStreamsOnContextChanges) {
+ for (IComponent component : contextIriObservers.get(iri)) {
+ component.getRspEngineHandler().pauseRspEngineStreams();
+ }
+ }
+
+ // keep track of map with fetched contexts
+ Map<String, Model> contextSnapshots = new HashMap<>();
+
+ // handle every observing component
+ for (IComponent component : contextIriObservers.get(iri)) {
+ try {
+ Model componentContext = ModelFactory.createDefaultModel();
+
+ // retrieve context for every IRI that is part of this component's context
+ for (String contextIri : component.getContextIris()) {
+ Model context;
+ if (iri.equals(contextIri)) {
+ context = model;
+ } else if (contextSnapshots.containsKey(contextIri)) {
+ context = contextSnapshots.get(contextIri);
+ } else {
+ context = knowledgeBase.getABox(contextIri);
+ contextSnapshots.put(contextIri, context);
+ }
+
+ // add retrieved context to full context of this component
+ componentContext.add(context.listStatements());
+ }
+
+ // update queries for component using its full context
+ divideEngine.enqueueGeneralDivideQueryDerivationTask(
+ component, new Context(componentContext));
+
+ } catch (KnowledgeBaseOperationException e) {
+ // if an error occurs when retrieving the knowledge base context
+ // for a given component, no RSP query update is enqueued for this
+ // component (because the context is incomplete)
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Error occurred when retrieving current context of" +
+ " component with ID {} -> queries are NOT updated",
+ component.getId(), e);
+
+ // TODO MONITOR: 01/02/2021 do something with this?
+
+ }
+ }
+ }
+ }
+
+ @Override
+ public synchronized void notifyTBoxUpdated(Model model) {
+ Thread tBoxUpdateThread = new Thread(() -> {
+ try {
+ LOGGER.info("TBox of DIVIDE knowledge base updated -> reloaded as DIVIDE ontology");
+
+ // load new ontology to the DIVIDE engine
+ divideEngine.loadOntology(model);
+
+ } catch (DivideInvalidInputException | DivideInitializationException e) {
+ // if something goes wrong, it should be logged,
+ // BUT the engine is guaranteed to continue working with the
+ // latest successfully loaded ontology, so no further action
+ // is required
+ LOGGER.error("Reloading new TBox as DIVIDE ontology FAILED - DIVIDE engine will" +
+ " continue working with the latest successfully loaded ontology");
+ }
+ });
+ tBoxUpdateThread.start();
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngine.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngine.java
new file mode 100644
index 0000000..4141933
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngine.java
@@ -0,0 +1,1166 @@
+package be.ugent.idlab.divide.core.engine;
+
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.context.Context;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+import be.ugent.idlab.divide.core.exception.DivideInitializationException;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException;
+import be.ugent.idlab.divide.core.query.DivideQueryFactory;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.core.query.parser.DivideQueryParserFactory;
+import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser;
+import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException;
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+import be.ugent.idlab.divide.util.LogConstants;
+import be.ugent.idlab.kb.IKnowledgeBase;
+import be.ugent.idlab.kb.exception.KnowledgeBaseOperationException;
+import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaOwlApiUtilities;
+import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaRuleUtilities;
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.reasoner.rulesys.Rule;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+class DivideEngine implements IDivideEngine {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(DivideEngine.class.getName());
+
+ /**
+ * Worker thread pool of maximum 100 threads to parallelize
+ * the query derivation process
+ */
+ private final ThreadPoolExecutor workerThreadPool;
+
+ /**
+ * Instance responsible for deriving the instantiated RSP-QL queries from
+ * the generic DIVIDE queries
+ */
+ private IDivideQueryDeriver divideQueryDeriver;
+
+ /**
+ * Manager of the DIVIDE components associated to this DIVIDE engine
+ */
+ private DivideComponentManager divideComponentManager;
+
+ /**
+ * Map linking a query name to the DIVIDE query instance
+ */
+ private final Map<String, IDivideQuery> divideQueryMap;
+
+ /**
+ * Map linking a component ID to its queue where query update
+ * requests can be put
+ */
+ private final Map<String, LinkedBlockingQueue<IDivideQueryUpdateTask>> componentQueryUpdateQueueMap;
+
+ /**
+ * Map linking a component ID to the thread that is processing
+ * its query update queue
+ */
+ private final Map<String, Thread> componentQueryUpdateThreadMap;
+
+ private DivideOntology divideOntology;
+
+ /**
+ * Boolean representing whether the engine has been successfully initialized
+ */
+ private boolean initialized;
+
+ /**
+ * Boolean representing whether RSP engine streams on a component should be
+ * paused when context changes are detected that trigger the DIVIDE query
+ * derivation for that component
+ */
+ private boolean pauseRspEngineStreamsOnContextChanges;
+
+ /**
+ * Boolean representing whether variable matches in the input for the DIVIDE
+ * query parser that are not defined as mappings, should be considered as
+ * mappings by default
+ */
+ private boolean processUnmappedVariableMatchesInParser;
+
+ /**
+ * Boolean representing whether variables in the RSP-QL query body generated by
+ * the DIVIDE query parser, should be validated (= checked for occurrence in the
+ * WHERE clause of the query or in the set of input variables that will be
+ * substituted during the DIVIDE query derivation) during parsing
+ */
+ private boolean validateUnboundVariablesInRspQlQueryBodyInParser;
+
+ /**
+ * Patterns used for preprocessing a DIVIDE query's sensor query rule
+ */
+ private static final Pattern INPUT_VARIABLE_NAME_PATTERN =
+ Pattern.compile("\"\\?([^\"]+)\"");
+ private static final Pattern INPUT_VARIABLE_DEFINITION_PATTERN =
+ Pattern.compile("\\(" + INPUT_VARIABLE_NAME_PATTERN + "\\s+\\?[^()]+\\s*\\)");
+ private static final Pattern INPUT_VARIABLE_LIST_PATTERN =
+ Pattern.compile("(inputVariables>?)\\s+\\(\\s*(\\s*" +
+ INPUT_VARIABLE_DEFINITION_PATTERN + ")*\\s*\\)");
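+
+ // Illustrative example of what these patterns match (the syntax below is a
+ // hypothetical sketch, for documentation only): in a rule consequence such as
+ //   :inputVariables> ( ("?patient" ?p) ("?threshold" ?t) )
+ // INPUT_VARIABLE_DEFINITION_PATTERN matches each ("?name" ?var) pair, and
+ // INPUT_VARIABLE_NAME_PATTERN captures the quoted variable name without the
+ // leading '?' (i.e., "patient" and "threshold").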
+
+ DivideEngine() {
+ // set initialized flag to false until initialize method is called
+ // and successfully ended
+ this.initialized = false;
+
+ // create engine objects
+ this.divideQueryDeriver = null;
+ this.divideComponentManager = null;
+ this.divideQueryMap = new HashMap<>();
+ this.componentQueryUpdateQueueMap = new HashMap<>();
+ this.componentQueryUpdateThreadMap = new HashMap<>();
+ this.divideOntology = null;
+
+ // create worker thread pool
+ this.workerThreadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool(100);
+ this.workerThreadPool.setCorePoolSize(50);
+ this.workerThreadPool.setMaximumPoolSize(100);
+ // alive time set below has no influence on whether and when the query update
+ // threads for the different components are stopped, since they are running
+ // outside this worker thread pool (it is only used for the query derivation for
+ // a single DIVIDE query, submitted by the query update thread!)
+ this.workerThreadPool.setKeepAliveTime(1, TimeUnit.HOURS);
+ }
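+
+ // Note on the pool sizing above (standard ThreadPoolExecutor semantics):
+ // Executors.newFixedThreadPool uses an unbounded LinkedBlockingQueue, and a
+ // ThreadPoolExecutor only grows beyond its core size when its queue is full.
+ // With the core size lowered to 50, at most 50 query derivation tasks will
+ // therefore run concurrently; the maximum pool size of 100 would only take
+ // effect with a bounded queue.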
+
+ @Override
+ public void initialize(IDivideQueryDeriver divideQueryDeriver,
+ IKnowledgeBase knowledgeBase,
+ Model divideOntologyModel,
+ boolean pauseRspEngineStreamsOnContextChanges,
+ boolean processUnmappedVariableMatchesInParser,
+ boolean validateUnboundVariablesInRspQlQueryBodyInParser)
+ throws DivideInvalidInputException, DivideInitializationException {
+ LOGGER.info("Initializing DIVIDE engine...");
+
+ // save engine that handles the actual query derivation
+ this.divideQueryDeriver = divideQueryDeriver;
+
+ // load ontology in DIVIDE query deriver
+ loadOntology(divideOntologyModel);
+
+ // register ontology as TBox of the knowledge base
+ // (important to do this before creating the DIVIDE component manager,
+ // since this manager will register itself as an observer of the
+ // ontology, and should not receive a TBox update yet)
+ try {
+ LOGGER.info("Registering DIVIDE ontology as TBox of knowledge base");
+ knowledgeBase.setTBox(divideOntologyModel);
+ } catch (KnowledgeBaseOperationException e) {
+ // should normally not occur
+ throw new DivideInitializationException(
+ "Error when registering DIVIDE ontology as TBox in knowledge base", e);
+ }
+
+ // save setting on pausing RSP engine streams
+ this.pauseRspEngineStreamsOnContextChanges = pauseRspEngineStreamsOnContextChanges;
+
+ // save parser settings
+ this.processUnmappedVariableMatchesInParser = processUnmappedVariableMatchesInParser;
+ this.validateUnboundVariablesInRspQlQueryBodyInParser =
+ validateUnboundVariablesInRspQlQueryBodyInParser;
+
+ // create DIVIDE component manager
+ this.divideComponentManager = new DivideComponentManager(
+ this, knowledgeBase, pauseRspEngineStreamsOnContextChanges);
+
+ // register successful initialization
+ this.initialized = true;
+ }
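+
+ // Minimal usage sketch (illustrative only; the argument values and variable
+ // names are hypothetical, and a concrete IDivideQueryDeriver and
+ // IKnowledgeBase implementation are assumed to be available):
+ //   IDivideEngine engine = DivideEngineFactory.createInstance();
+ //   engine.initialize(queryDeriver, knowledgeBase, ontologyModel,
+ //           true, false, true);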
+
+ /**
+ * Loads the ontology that needs to be used as input (TBox) for each query
+ * derivation performed by the query deriver of this engine.
+ *
+ * If an ontology has been loaded successfully in the past at least once,
+ * this method will reload the ontology based on the new input. If something
+ * goes wrong during this reloading and an exception is thrown, the query
+ * deriver should still be in a valid state and continue working with the
+ * latest successfully loaded ontology.
+ *
+ * @throws DivideInitializationException if something goes wrong during the ontology
+ * loading process, which prevents the DIVIDE engine
+ * from functioning as it should
+ * @throws DivideInvalidInputException when the specified ontology contains invalid
+ * statements, i.e., statements which cannot be loaded
+ * by the query deriver
+ */
+ synchronized void loadOntology(Model divideOntologyModel)
+ throws DivideInvalidInputException, DivideInitializationException {
+ LOGGER.info("Loading ontology...");
+ LOGGER.debug(LogConstants.METRIC_MARKER, "LOAD_ONTOLOGY_START");
+
+ long start = System.currentTimeMillis();
+
+ // load ontology into DIVIDE query deriver
+ divideQueryDeriver.loadOntology(divideOntologyModel);
+
+ // convert ontology to a set of rules
+ // (to be used by the context enrichers)
+ OWLOntology divideOntology =
+ JenaOwlApiUtilities.getOWLOntology(divideOntologyModel);
+ List<Rule> divideOntologyRules =
+ JenaRuleUtilities.convertOntologyToRulesList(divideOntology);
+
+ // save ontology model and rules to engine
+ this.divideOntology = new DivideOntology(divideOntologyModel, divideOntologyRules);
+
+ // update context enrichers for all components registered to engine
+ if (divideComponentManager != null) {
+ for (IComponent component : divideComponentManager.getRegisteredComponents()) {
+ enqueueContextEnricherUpdaterTask(component);
+ }
+ }
+
+ LOGGER.debug(LogConstants.METRIC_MARKER, "LOAD_ONTOLOGY_END");
+ LOGGER.info("Finished loading ontology in {} ms", System.currentTimeMillis() - start);
+ }
+
+ @Override
+ public IDivideQuery addDivideQuery(String name,
+ String queryPattern,
+ String sensorQueryRule,
+ String goal,
+ ContextEnrichment contextEnrichment) throws
+ DivideNotInitializedException, DivideQueryDeriverException, DivideInvalidInputException {
+ LOGGER.info("Adding DIVIDE query with name '{}'...", name);
+
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ // ensure DIVIDE query with that name does not yet exist
+ if (divideQueryMap.containsKey(name)) {
+ LOGGER.warn("Trying to add DIVIDE query with already existing name '{}'", name);
+ return null;
+ }
+
+ // preprocess sensor query rule to avoid issues with overlapping variables
+ sensorQueryRule = preprocessSensorQueryRule(sensorQueryRule);
+
+ // create DIVIDE query
+ IDivideQuery divideQuery = DivideQueryFactory.createInstance(
+ name, queryPattern, sensorQueryRule, goal, contextEnrichment);
+
+ try {
+ // validate the defined context enrichment of the new DIVIDE query
+ validateContextEnrichment(divideQuery.getContextEnrichment());
+
+ // register DIVIDE query at query deriver
+ divideQueryDeriver.registerQuery(divideQuery, getQueryParser());
+
+ // keep track of DIVIDE query in map
+ synchronized (divideQueryMap) {
+ divideQueryMap.put(name, divideQuery);
+ }
+
+ // start query derivation for this DIVIDE query only,
+ // for each component registered to the engine
+ for (IComponent component : divideComponentManager.getRegisteredComponents()) {
+ // to do so, first a context enricher should be created for this new query
+ // -> enqueue task to register new context enricher
+ enqueueContextEnricherUpdaterTask(component, divideQuery);
+
+ // retrieve current context associated to component
+ Model componentContext = divideComponentManager.
+ getContextAssociatedToComponent(component.getId());
+
+ // enqueue query derivation for new DIVIDE query if context of component
+ // exists and is non-empty
+ if (componentContext == null) {
+ LOGGER.info("No context available yet for component '{}' " +
+ "=> no query derivation for new DIVIDE query '{}' enqueued",
+ component.getId(), divideQuery.getName());
+ } else if (componentContext.isEmpty()) {
+ LOGGER.info("Available context for component '{}' is empty " +
+ "=> no query derivation for new DIVIDE query '{}' enqueued",
+ component.getId(), divideQuery.getName());
+ } else {
+ LOGGER.info("Context for component '{}' is available and non-empty " +
+ "=> query derivation for new DIVIDE query '{}' enqueued",
+ component.getId(), divideQuery.getName());
+ enqueueSpecificDivideQueryDerivationTask(
+ component, new Context(componentContext), divideQuery);
+ }
+ }
+
+ return divideQuery;
+
+ } catch (DivideInvalidInputException e) {
+ LOGGER.warn("Something went wrong when registering the new DIVIDE query to " +
+ "because the given input is invalid", e);
+ throw e;
+
+ } catch (DivideQueryDeriverException e) {
+ LOGGER.warn("Something went wrong when registering the new DIVIDE query to " +
+ "the query deriver - DIVIDE query is therefore NOT registered", e);
+ throw e;
+ }
+ }
+
+ private void validateContextEnrichment(ContextEnrichment contextEnrichment)
+ throws DivideInvalidInputException {
+ // validate every individual query
+ try {
+ getQueryParser().validateDivideQueryContextEnrichment(contextEnrichment);
+ } catch (InvalidDivideQueryParserInputException e) {
+ throw new DivideInvalidInputException(
+ "DIVIDE query contains invalid context-enriching queries" +
+ (e.getMessage() != null ? ": " + e.getMessage() : ""), e);
+ }
+ }
+
+ @Override
+ public void removeDivideQuery(String name,
+ boolean unregisterQueries) throws DivideNotInitializedException {
+ LOGGER.info("Removing DIVIDE query with name '{}'...", name);
+
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ IDivideQuery divideQuery;
+ synchronized (divideQueryMap) {
+ divideQuery = divideQueryMap.remove(name);
+ }
+ if (divideQuery != null) {
+ // unregister query at query deriver
+ divideQueryDeriver.unregisterQuery(divideQuery);
+
+ // enqueue task to handle the removal of the DIVIDE query at this component
+ // -> context enricher for this query will be unregistered
+ // -> all RSP queries that originate from this DIVIDE query are unregistered
+ // on the registered components of the system (only if specified to do so)
+ for (IComponent component : divideComponentManager.getRegisteredComponents()) {
+ enqueueDivideQueryRemovalHandlingTask(component, divideQuery, unregisterQueries);
+ }
+ }
+ }
+
+ @Override
+ public Collection<IDivideQuery> getDivideQueries() throws DivideNotInitializedException {
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ return divideQueryMap.values();
+ }
+
+ @Override
+ public IDivideQuery getDivideQueryByName(String name) throws DivideNotInitializedException {
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ return divideQueryMap.get(name);
+ }
+
+ @Override
+ public IComponent registerComponent(List<String> contextIris,
+ RspQueryLanguage rspQueryLanguage,
+ String rspEngineUrl)
+ throws DivideNotInitializedException, DivideInvalidInputException {
+ LOGGER.info("Adding new DIVIDE component...");
+
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ // register new component at component manager
+ IComponent component = divideComponentManager.registerComponent(
+ contextIris, rspQueryLanguage, rspEngineUrl);
+
+ // if component is not null, prepare the engine for handling query update requests
+ if (component != null) {
+ // create a queue for this component where query update requests can be put
+ final LinkedBlockingQueue<IDivideQueryUpdateTask> queryUpdateQueue =
+ new LinkedBlockingQueue<>();
+ componentQueryUpdateQueueMap.put(component.getId(), queryUpdateQueue);
+
+ // create and start a thread for this component that continuously processes
+ // the query update queue
+ LOGGER.info("Starting new query update thread for component '{}'", component.getId());
+ Thread queryUpdateThread = new Thread(() ->
+ processRspQueryUpdateQueue(component, queryUpdateQueue));
+ queryUpdateThread.start();
+ componentQueryUpdateThreadMap.put(component.getId(), queryUpdateThread);
+
+ // enqueue a task to register a new context enricher associated to this component
+ // -> this will be done in parallel for the different existing DIVIDE queries
+ enqueueContextEnricherUpdaterTask(component);
+
+ // check if context is available for component, and if so, enqueue first
+ // query derivation before registering observers
+ Model componentContext = divideComponentManager.
+ getContextAssociatedToComponent(component.getId());
+ if (componentContext == null) {
+ LOGGER.info("No context available yet for component '{}' " +
+ "=> no query derivation enqueued yet", component.getId());
+ } else if (componentContext.isEmpty()) {
+ LOGGER.info("Available context for component '{}' is empty " +
+ "=> no query derivation enqueued yet", component.getId());
+ } else {
+ LOGGER.info("Context for component '{}' is available and non-empty " +
+ "=> first query derivation enqueued", component.getId());
+ enqueueGeneralDivideQueryDerivationTask(component, new Context(componentContext));
+ }
+
+ // register component as observer for all its context IRIs
+ component.getContextIris().forEach(
+ s -> divideComponentManager.addContextIriObserver(s, component));
+ }
+
+ return component;
+ }
+
+ @Override
+ public void unregisterComponent(String id,
+ boolean unregisterQueries) throws DivideNotInitializedException {
+ LOGGER.info("Unregistering DIVIDE component with ID {}...", id);
+
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ IComponent removed = divideComponentManager.unregisterComponent(id);
+
+ // handle query update queue & thread if component ID exists and
+ // is actually removed
+ if (removed != null) {
+ // no longer keep track of query update queue
+ componentQueryUpdateQueueMap.remove(id);
+
+ // interrupt the query update thread but still keep track of it so that
+ // it can be observed that this thread was interrupted
+ LOGGER.info("Interrupting query update thread of component {} because " +
+ "it is being removed", id);
+ Thread queryUpdateThread = componentQueryUpdateThreadMap.get(id);
+ queryUpdateThread.interrupt();
+
+ // also interrupt the RSP engine status update thread
+ if (pauseRspEngineStreamsOnContextChanges) {
+ removed.getRspEngineHandler().stopRspEngineStreamsUpdates();
+ }
+
+ // if specified, remove all queries registered on the RSP engine
+ // of this component by this DIVIDE engine
+ // (interrupting the query update thread will ensure no new
+ // registrations or unregistrations take place in this thread)
+ if (unregisterQueries) {
+ removed.getRspEngineHandler().unregisterAllQueries();
+ }
+ }
+ }
+
+ @Override
+ public Collection<IComponent> getRegisteredComponents() throws DivideNotInitializedException {
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ return divideComponentManager.getRegisteredComponents();
+ }
+
+ @Override
+ public IComponent getRegisteredComponentById(String id) throws DivideNotInitializedException {
+ if (!initialized) {
+ throw new DivideNotInitializedException();
+ }
+
+ return divideComponentManager.getRegisteredComponentById(id);
+ }
+
+ @Override
+ public IDivideQueryParser getQueryParser() {
+ return DivideQueryParserFactory.getInstance(
+ processUnmappedVariableMatchesInParser,
+ validateUnboundVariablesInRspQlQueryBodyInParser);
+ }
+
+ synchronized DivideOntology getDivideOntology() {
+ return divideOntology;
+ }
+
+ /**
+ * Preprocesses the sensor query rule of a new DIVIDE query.
+ * For this preprocessing, the list of input variables in the consequence of the
+ * rule is retrieved, parsed, checked for validity, and modified. The modifying
+ * part consists of updating the order in which the input variables occur in the
+ * list: if any input variable contains another one, the longer one should be
+ * present first in the list, to avoid later substitution errors during the query
+ * substitution process.
+ *
+ * @param sensorQueryRule sensor query rule to be preprocessed
+ * @return preprocessed sensor query rule
+ * @throws DivideInvalidInputException if definition of input variables in sensor
+ * query rule is invalid
+ */
+ private String preprocessSensorQueryRule(String sensorQueryRule)
+ throws DivideInvalidInputException {
+ Matcher m1 = INPUT_VARIABLE_LIST_PATTERN.matcher(sensorQueryRule);
+ if (m1.find()) {
+ Matcher m2 = INPUT_VARIABLE_DEFINITION_PATTERN.matcher(m1.group());
+ Map<String, String> inputVariablesMap = new HashMap<>();
+ while (m2.find()) {
+ Matcher m3 = INPUT_VARIABLE_NAME_PATTERN.matcher(m2.group());
+ if (m3.find()) {
+ inputVariablesMap.put(m3.group(1), m2.group());
+ } else {
+ throw new DivideInvalidInputException(
+ "Sensor query rule of DIVIDE query does not " +
+ "contain a valid definition of the DIVIDE input variables");
+ }
+ }
+
+ // sort input variable names occurring in list
+ // -> construct new RDF list of lists (for input variables) based on sorted names
+ String sortedInputVariables = inputVariablesMap.keySet()
+ .stream()
+ .sorted((s1, s2) -> s1.contains(s2) ?
+ (s1.equals(s2) ? 0 : -1) :
+ (s2.contains(s1) ? 1 : s1.compareTo(s2)))
+ .map(inputVariablesMap::get)
+ .collect(Collectors.joining(" "));
+ String replacement = String.format("%s (%s)", m1.group(1), sortedInputVariables);
+
+ return sensorQueryRule.replaceFirst(
+ Pattern.quote(m1.group()), Matcher.quoteReplacement(replacement));
+
+ } else {
+ throw new DivideInvalidInputException("Sensor query rule of DIVIDE query does not " +
+ "contain a valid definition of the DIVIDE input variables");
+ }
+ }
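+
+ // Example of the reordering performed above (variable names are hypothetical):
+ // for input variables "?value" and "?valueThreshold", "?valueThreshold" is
+ // placed first in the list; otherwise, textually substituting "?value" during
+ // the query derivation would also corrupt every occurrence of "?valueThreshold".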
+
+ /**
+ * Adds a task to this component's queue to update the RSP queries for
+ * this {@link IComponent}, with the specified context as input for the
+ * query derivation.
+ * This method will be called by the {@link DivideComponentManager} when
+ * knowledge base changes to the context relevant for the given component
+ * are observed.
+ *
+ * @param component {@link IComponent} for which the RSP queries should be updated
+ * @param context context model to use as input for the query derivation, i.e.,
+ * the relevant context of the context IRIs associated to the given
+ * {@link IComponent} instance
+ */
+ void enqueueGeneralDivideQueryDerivationTask(IComponent component, Context context) {
+ try {
+ LOGGER.info("Enqueueing general DIVIDE query derivation task for component " +
+ "with ID '{}' and context ID '{}'",
+ component.getId(), context.getId());
+
+ // retrieve component's query update request queue
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queue =
+ componentQueryUpdateQueueMap.get(component.getId());
+
+ // if there is still any update task waiting in the queue, it can be
+ // removed since the context has again been updated meanwhile
+ // -> for tasks of the same type as this one: it would not make sense to
+ // first do the update with the old context and then after that with
+ // the new context; better to immediately do it with the new context
+ // -> for a task to unregister all queries associated to a removed
+ // DIVIDE query: since the new query derivation will no longer do
+ // the query derivation for this removed DIVIDE query, an update
+ // of the queries after the derivation will automatically result in
+ // the removal of all queries associated to this removed DIVIDE
+ // query (since no associated queries will end up in the list of
+ // new queries)
+ // -> for a task to register new queries associated to a new DIVIDE
+ // query: since the new query derivation task will involve the query
+ // derivation for all registered DIVIDE queries, it will automatically
+ // also include the derivation of this new DIVIDE query
+ queue.clear();
+
+ // enqueue query update request with newest context in the component's queue
+ queue.put(new GeneralDivideQueryDerivationTask(component, context));
+
+ // restart query update thread if needed
+ restartQueryUpdateThreadIfNeeded(component, queue);
+
+ } catch (InterruptedException ignored) {
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Enqueueing general DIVIDE query derivation task for component with ID '{}'" +
+ "and context ID '{}' resulted in unexpected InterruptedException",
+ component.getId(), context.getId());
+
+ // retry if interrupted while waiting (but the queue is not bounded
+ // so normally the queue put operation should not block)
+ enqueueGeneralDivideQueryDerivationTask(component, context);
+ }
+ }
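+
+ // Coalescing example: if three context updates for a component arrive while
+ // its query update thread is still busy, the first two general derivation
+ // tasks are cleared from the queue again, and only the task carrying the
+ // newest context is executed once the thread becomes free.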
+
+ /**
+ * Adds a task to this component's queue to update the RSP queries associated
+ * to the given {@link IDivideQuery} for this {@link IComponent}, with the
+ * specified context as input for the query derivation.
+ * This method will be called upon the registration of a new DIVIDE query.
+ *
+ * @param component {@link IComponent} for which the RSP queries should be updated
+ * @param context context model to use as input for the query derivation, i.e.,
+ * the relevant context of the context IRIs associated to the given
+ * {@link IComponent} instance
+ * @param divideQuery DIVIDE query for which the query derivation task should
+ * be enqueued
+ */
+ void enqueueSpecificDivideQueryDerivationTask(IComponent component,
+ Context context,
+ IDivideQuery divideQuery) {
+ try {
+ LOGGER.info("Enqueueing specific DIVIDE query derivation task for DIVIDE " +
+ "query '{}' for component " +
+ "with ID '{}' and context ID '{}'",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ // retrieve component's query update request queue
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queue =
+ componentQueryUpdateQueueMap.get(component.getId());
+
+ // enqueue query update request with newest context in the component's queue
+ queue.put(new SpecificDivideQueryDerivationTask(
+ component, context, divideQuery));
+
+ // restart query update thread if needed
+ restartQueryUpdateThreadIfNeeded(component, queue);
+
+ } catch (InterruptedException ignored) {
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Enqueueing specific DIVIDE query derivation task for DIVIDE query '{}'," +
+ "component with ID '{}' and context ID '{}' resulted " +
+ "in unexpected InterruptedException",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ // retry if interrupted while waiting (but the queue is not bounded
+ // so normally the queue put operation should not block)
+ enqueueSpecificDivideQueryDerivationTask(component, context, divideQuery);
+ }
+ }
+
+ /**
+ * Adds a task to this component's queue to unregister the RSP queries for
+ * this {@link IComponent} that are currently registered via the DIVIDE query
+ * derivation of the specified DIVIDE query.
+ * This method will be called by this engine when a DIVIDE query is unregistered
+ * from the DIVIDE engine.
+ *
+ * @param component {@link IComponent} for which the RSP queries should be unregistered
+ * @param divideQuery DIVIDE query of which the associated RSP engine queries need to be
+ * unregistered from the wrapped RSP engine
+ */
+ private void enqueueDivideQueryRemovalHandlingTask(IComponent component,
+ IDivideQuery divideQuery,
+ boolean unregisterQueries) {
+ try {
+ LOGGER.info("Enqueueing DIVIDE query removal handling task for component " +
+ "with ID '{}' and DIVIDE query '{}'",
+ component.getId(), divideQuery.getName());
+
+ // retrieve component's query update request queue
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queue =
+ componentQueryUpdateQueueMap.get(component.getId());
+
+ // enqueue removal handling task
+ // IMPORTANT: the queue is not cleared in this case, since this task
+ // does not involve a query derivation for the remaining DIVIDE
+ // queries and can therefore not be superseded by a later task
+ queue.put(new DivideQueryRemovalHandlingTask(
+ component, divideQuery, unregisterQueries));
+
+ // restart query update thread if needed
+ restartQueryUpdateThreadIfNeeded(component, queue);
+
+ } catch (InterruptedException ignored) {
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Enqueueing DIVIDE query removal handling task for component with ID '{}' " +
+ "and DIVIDE query '{}' resulted in unexpected InterruptedException",
+ component.getId(), divideQuery.getName());
+
+ // retry if interrupted while waiting (but the queue is not bounded
+ // so normally the queue put operation should not block)
+ enqueueDivideQueryRemovalHandlingTask(component, divideQuery, unregisterQueries);
+ }
+ }
+
+ private void enqueueContextEnricherUpdaterTask(IComponent component,
+ IDivideQuery divideQuery) {
+ try {
+ LOGGER.info("Enqueueing task to update context enrichers for DIVIDE " +
+ "query '{}' for component with ID '{}'",
+ divideQuery.getName(), component.getId());
+
+ // retrieve component's query update request queue
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queue =
+ componentQueryUpdateQueueMap.get(component.getId());
+
+ // enqueue context enricher updater task in the component's queue
+ queue.put(new ContextEnricherUpdaterTask(component, divideQuery));
+
+ // restart query update thread if needed
+ restartQueryUpdateThreadIfNeeded(component, queue);
+
+ } catch (InterruptedException ignored) {
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Enqueueing task to update context enrichers for DIVIDE query '{}' for " +
+ "component with ID '{}' resulted in unexpected InterruptedException",
+ divideQuery.getName(), component.getId());
+
+ // retry if interrupted while waiting (but the queue is not bounded
+ // so normally the queue put operation should not block)
+ enqueueContextEnricherUpdaterTask(component, divideQuery);
+ }
+ }
+
+ private void enqueueContextEnricherUpdaterTask(IComponent component) {
+ try {
+ LOGGER.info("Enqueueing task to update context enrichers for all DIVIDE queries " +
+ "on component with ID '{}'", component.getId());
+
+ // retrieve component's query update request queue
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queue =
+ componentQueryUpdateQueueMap.get(component.getId());
+
+ // enqueue context enricher updater task in the component's queue
+ queue.put(new ContextEnricherUpdaterTask(component));
+
+ // restart query update thread if needed
+ restartQueryUpdateThreadIfNeeded(component, queue);
+
+ } catch (InterruptedException ignored) {
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Enqueueing task to update context enrichers for all DIVIDE queries on " +
+ "component with ID '{}' resulted in unexpected InterruptedException",
+ component.getId());
+
+ // retry if interrupted while waiting (but the queue is not bounded
+ // so normally the queue put operation should not block)
+ enqueueContextEnricherUpdaterTask(component);
+ }
+ }
+
+ private void restartQueryUpdateThreadIfNeeded(IComponent component,
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queue) {
+ // check if thread that is processing query updates is interrupted
+ // (in that case it will have been removed from the query update thread map),
+ // and if so, create & start new thread for this
+ Thread queryUpdateThread =
+ componentQueryUpdateThreadMap.get(component.getId());
+ if (queryUpdateThread == null) {
+ LOGGER.info("Query update thread for component with ID '{}' has been " +
+ "interrupted, so a new thread is started",
+ component.getId());
+
+ Thread newQueryUpdateThread = new Thread(() ->
+ processRspQueryUpdateQueue(component, queue));
+ newQueryUpdateThread.start();
+ componentQueryUpdateThreadMap.put(component.getId(), newQueryUpdateThread);
+ }
+ }
+
+ private void processRspQueryUpdateQueue(IComponent component,
+ LinkedBlockingQueue<IDivideQueryUpdateTask> queryUpdateQueue) {
+ try {
+ boolean interrupted = false;
+ while (!interrupted) {
+ // retrieve the updated context from the queue - blocks if the
+ // queue is empty until an item again enters the queue
+ IDivideQueryUpdateTask queryUpdateTask = queryUpdateQueue.take();
+
+ // launch the query derivation for the given component & context
+ interrupted = queryUpdateTask.execute();
+ }
+
+ // thread is interrupted explicitly by the system, probably because
+ // the component is unregistered
+ LOGGER.info("Query update thread for component '{}' is found interrupted after query" +
+ " update, so is stopping with the processing of the query update queue",
+ component.getId());
+
+ } catch (InterruptedException e) {
+ LOGGER.info("Query update thread for component '{}' is interrupted while waiting," +
+ " so is stopping with the processing of the query update queue",
+ component.getId());
+ }
+
+ // remove thread from query update thread map so that the engine
+ // knows a new thread should be started upon arrival of a new
+ // query update request
+ // (after exiting this method, the thread status will become TERMINATED)
+ componentQueryUpdateThreadMap.remove(component.getId());
+ }
+
+ private class GeneralDivideQueryDerivationTask implements IDivideQueryUpdateTask {
+
+ private final Logger LOGGER = LoggerFactory.getLogger(
+ GeneralDivideQueryDerivationTask.class.getName());
+
+ private final IComponent component;
+ private final Context context;
+
+ GeneralDivideQueryDerivationTask(IComponent component, Context context) {
+ this.component = component;
+ this.context = context;
+ }
+
+ @Override
+ public boolean execute() {
+ LOGGER.info("Preparing DIVIDE query derivation for component with ID '{}' " +
+ "and context '{}' in RSP query update thread",
+ component.getId(), context.getId());
+
+ // retrieve list of queries (copied into a new collection, so that no live
+ // view of the map escapes the synchronized block)
+ Collection<IDivideQuery> divideQueries;
+ synchronized (divideQueryMap) {
+ divideQueries = new ArrayList<>(divideQueryMap.values());
+ }
+
+ // stop if list of queries is empty
+ if (divideQueries.isEmpty()) {
+ LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " +
+ "no DIVIDE queries registered, so finishing task",
+ component.getId(), context.getId());
+ return Thread.currentThread().isInterrupted();
+ }
+
+ // run the query derivation scripts in parallel for every DIVIDE query,
+ // each on a dedicated thread in the worker thread pool
+ CountDownLatch latch = new CountDownLatch(divideQueries.size());
+ DivideOntology ontology = getDivideOntology();
+ for (IDivideQuery divideQuery : divideQueries) {
+ workerThreadPool.submit(new SingleQueryDeriver(
+ divideQuery, context, component,
+ divideQueryDeriver, ontology, latch));
+ }
+
+ // keep track of whether the thread gets interrupted while waiting for
+ // the other threads to finish
+ boolean interruptedWhileWaiting = false;
+
+ // wait until the query derivation threads have all finished
+ boolean queryDerivationThreadsFinished = false;
+ while (!queryDerivationThreadsFinished) {
+ try {
+ LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " +
+ "waiting for other threads to finish the individual query derivations" +
+ " for the different DIVIDE queries",
+ component.getId(), context.getId());
+
+ // wait for the latch to be decremented by the query derivation threads
+ latch.await();
+
+ // if the previous call returns, this means that all threads have
+ // finished (since they all count down the latch when finished)
+ queryDerivationThreadsFinished = true;
+
+ } catch (InterruptedException e) {
+ // interrupts of this thread should be ignored, since it is really
+ // required to await the latch being counted down to zero
+ // (and only handle interrupt requests at the end of this method)
+ LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " +
+ "query update thread interrupted while waiting for other threads",
+ component.getId(), context.getId());
+
+ // it is however important to remember that this interruption happened
+ // while waiting
+ interruptedWhileWaiting = true;
+ }
+ }
+
+ // check if thread has been interrupted up to this point
+ // -> if so, no query registration update should take place
+ // (in normal circumstances this only happens if the component is
+ // unregistered from the engine)
+ if (interruptedWhileWaiting || Thread.currentThread().isInterrupted()) {
+ LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " +
+ "not registering queries since query update thread has been interrupted",
+ component.getId(), context.getId());
+
+ // clearing registration schedule since no queries will be registered anymore
+ component.getRspEngineHandler().clearRegistrationSchedule();
+
+ // check if there already is a new general DIVIDE query derivation task
+ // in the queue at this point, AND the RSP engine streams are paused
+ // on context changes
+ // -> if yes, no query registration update should take place (since the
+ // RSP engine streams are paused, and it only makes sense to update
+ // them after the last general DIVIDE query derivation task in the
+ // queue has completed, since they are derived from the most recent
+ // up-to-date context)
+ } else if (pauseRspEngineStreamsOnContextChanges &&
+ componentQueryUpdateQueueMap.get(component.getId()).stream().anyMatch(
+ queryUpdateTask -> queryUpdateTask instanceof GeneralDivideQueryDerivationTask)) {
+ LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " +
+ "not registering queries since query update queue contains new " +
+ "general DIVIDE query derivation task",
+ component.getId(), context.getId());
+
+ // clearing registration schedule since no queries will be registered anymore
+ component.getRspEngineHandler().clearRegistrationSchedule();
+
+ } else {
+ // update query registration at actual RSP engine
+ component.getRspEngineHandler().updateRegistration();
+ }
+
+ // in any case, it is still important to try restarting the RSP engine streams
+ // (if the thread was interrupted explicitly because the component is being
+ // removed, then all updates will be stopped explicitly anyway)
+ // -> if new pause requests meanwhile disallow a restart, no restart is performed
+ if (pauseRspEngineStreamsOnContextChanges) {
+ component.getRspEngineHandler().restartRspEngineStreams();
+ }
+
+ LOGGER.info("Finished DIVIDE query derivation for component with ID '{}' and context '{}'",
+ component.getId(), context.getId());
+
+ return interruptedWhileWaiting || Thread.currentThread().isInterrupted();
+ }
+ }
+
+ private class SpecificDivideQueryDerivationTask implements IDivideQueryUpdateTask {
+
+ private final Logger LOGGER = LoggerFactory.getLogger(
+ SpecificDivideQueryDerivationTask.class.getName());
+
+ private final IComponent component;
+ private final Context context;
+ private final IDivideQuery divideQuery;
+
+ SpecificDivideQueryDerivationTask(IComponent component,
+ Context context,
+ IDivideQuery divideQuery) {
+ this.component = component;
+ this.context = context;
+ this.divideQuery = divideQuery;
+ }
+
+ @Override
+ public boolean execute() {
+ LOGGER.info("Preparing specific DIVIDE query derivation for DIVIDE query '{}', " +
+ "for component with ID '{}' " +
+ "and context '{}' in RSP query update thread",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ // run the query derivation script for the given DIVIDE query,
+ // on a dedicated thread in the worker thread pool
+ CountDownLatch latch = new CountDownLatch(1);
+ workerThreadPool.submit(new SingleQueryDeriver(
+ divideQuery, context, component,
+ divideQueryDeriver, getDivideOntology(), latch));
+
+ // keep track of whether the thread gets interrupted while waiting for
+ // the other threads to finish
+ boolean interruptedWhileWaiting = false;
+
+ // wait until the query derivation threads have all finished
+ boolean queryDerivationThreadsFinished = false;
+ while (!queryDerivationThreadsFinished) {
+ try {
+ LOGGER.info("Specific DIVIDE query derivation for DIVIDE query '{}', " +
+ "for component with ID '{}' and context '{}': " +
+ "waiting for other thread to finish the individual query derivation",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ // wait for the latch to be decremented by the query derivation thread
+ latch.await();
+
+ // if the previous call returns, this means that the thread has finished
+ queryDerivationThreadsFinished = true;
+
+ } catch (InterruptedException e) {
+ // interrupts of this thread should be ignored, since it is really
+ // required to await the latch being counted down to zero
+ // (and only handle interrupt requests at the end of this method)
+ LOGGER.info("Specific DIVIDE query derivation for DIVIDE query '{}', " +
+ "for component with ID '{}' and context '{}': " +
+ "query update thread interrupted while waiting for other threads",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ // it is however important to remember that this interruption happened
+ // while waiting
+ interruptedWhileWaiting = true;
+ }
+ }
+
+ if (!interruptedWhileWaiting && !Thread.currentThread().isInterrupted()) {
+ // update query registration at actual RSP engine for this DIVIDE query
+ component.getRspEngineHandler().updateRegistration(divideQuery);
+ } else {
+ LOGGER.info("Specific DIVIDE query derivation for DIVIDE query '{}', " +
+ "for component with ID '{}' and context '{}': " +
+ "not registering queries since query update thread has been interrupted",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ // clearing registration schedule since no queries will be registered anymore
+ component.getRspEngineHandler().clearRegistrationSchedule(divideQuery);
+ }
+
+ LOGGER.info("Finished DIVIDE query derivation for component with ID '{}' and context '{}'",
+ component.getId(), context.getId());
+
+ return interruptedWhileWaiting || Thread.currentThread().isInterrupted();
+ }
+ }
+
+ private static class DivideQueryRemovalHandlingTask implements IDivideQueryUpdateTask {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(
+ DivideQueryRemovalHandlingTask.class.getName());
+
+ private final IComponent component;
+ private final IDivideQuery divideQuery;
+ private final boolean unregisterQueries;
+
+ DivideQueryRemovalHandlingTask(IComponent component,
+ IDivideQuery divideQuery,
+ boolean unregisterQueries) {
+ this.component = component;
+ this.divideQuery = divideQuery;
+ this.unregisterQueries = unregisterQueries;
+ }
+
+ @Override
+ public boolean execute() {
+ LOGGER.info("Handling the removal of DIVIDE query '{}' at component with ID '{}':" +
+ "unregistering context enrichers{}",
+ divideQuery.getName(), component.getId(),
+ unregisterQueries ? " and starting the unregistering of all queries" : "");
+
+ if (!Thread.currentThread().isInterrupted()) {
+ // unregister context enricher for the given component and DIVIDE query
+ component.unregisterContextEnricher(divideQuery);
+
+ if (unregisterQueries) {
+ // update query registration at actual RSP engine
+ component.getRspEngineHandler().
+ unregisterAllQueriesOriginatingFromDivideQuery(divideQuery);
+ }
+ } else {
+ LOGGER.info("Unregistering of all queries at {} associated to " +
+ "removed DIVIDE query {} has been interrupted",
+ component.getId(), divideQuery.getName());
+ }
+
+ LOGGER.info("Finished unregistering of all queries at {} associated to " +
+ "removed DIVIDE query {}",
+ component.getId(), divideQuery.getName());
+
+ return Thread.currentThread().isInterrupted();
+ }
+
+ }
+
+ private class ContextEnricherUpdaterTask implements IDivideQueryUpdateTask {
+
+ private final Logger LOGGER = LoggerFactory.getLogger(
+ ContextEnricherUpdaterTask.class.getName());
+
+ private final IComponent component;
+ private final Collection<IDivideQuery> divideQueries;
+
+ ContextEnricherUpdaterTask(IComponent component,
+ IDivideQuery divideQuery) {
+ this.component = component;
+ this.divideQueries = Collections.singletonList(divideQuery);
+ }
+
+ ContextEnricherUpdaterTask(IComponent component) {
+ this.component = component;
+ this.divideQueries = new ArrayList<>();
+ }
+
+ @Override
+ public boolean execute() {
+ // retrieve list of all DIVIDE queries if no set of queries is specified for this task
+ if (divideQueries.isEmpty()) {
+ synchronized (divideQueryMap) {
+ divideQueries.addAll(divideQueryMap.values());
+ }
+ }
+
+ // stop if list of queries is empty
+ if (divideQueries.isEmpty()) {
+ LOGGER.info("Task to update context enrichers for component with ID '{}': " +
+ "stopped, since no DIVIDE queries are registered",
+ component.getId());
+ return Thread.currentThread().isInterrupted();
+ }
+
+ LOGGER.info("Starting task to update context enrichers for component with ID '{}' " +
+ "and following queries: {}",
+ component.getId(),
+ divideQueries.stream().map(IDivideQuery::getName).collect(Collectors.joining(", ")));
+ long start = System.currentTimeMillis();
+
+ // create the context enricher for each given DIVIDE query,
+ // on a dedicated thread in the worker thread pool
+ CountDownLatch latch = new CountDownLatch(divideQueries.size());
+ for (IDivideQuery divideQuery : divideQueries) {
+ workerThreadPool.submit(new SingleContextEnricherUpdater(
+ component,
+ divideQuery,
+ getDivideOntology(),
+ latch));
+ }
+
+ // keep track of whether the thread gets interrupted while waiting for
+ // the other threads to finish
+ boolean interruptedWhileWaiting = false;
+
+ // wait until the context enricher creation threads have all finished
+ boolean threadsFinished = false;
+ while (!threadsFinished) {
+ try {
+ LOGGER.info("Waiting for threads to finish parallel updating of context enricher " +
+ "for component with ID '{}' for {} DIVIDE queries",
+ component.getId(), divideQueries.size());
+
+ // wait for the latch to be decremented by the different threads
+ // -> after this method call completes, all parallel tasks are finished
+ latch.await();
+
+ // if the previous call returns, this means that the thread has finished
+ threadsFinished = true;
+
+ } catch (InterruptedException e) {
+ // interrupts of this thread should be ignored, since it is really
+ // required to await the latch being counted down to zero
+ // (and only handle interrupt requests at the end of this method)
+ LOGGER.info("Parallel updating of context enricher for component with ID '{}': " +
+ "query update thread interrupted while waiting for other threads",
+ component.getId());
+
+ // it is however important to remember that this interruption happened
+ // while waiting
+ interruptedWhileWaiting = true;
+ }
+ }
+
+ LOGGER.info("Finished updating context enrichers for component with ID '{}' and " +
+ "queries {} in {} ms",
+ component.getId(),
+ divideQueries.stream().map(IDivideQuery::getName).collect(Collectors.toList()),
+ System.currentTimeMillis() - start);
+
+ return interruptedWhileWaiting || Thread.currentThread().isInterrupted();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngineFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngineFactory.java
new file mode 100644
index 0000000..eb30fb6
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngineFactory.java
@@ -0,0 +1,14 @@
+package be.ugent.idlab.divide.core.engine;
+
+public class DivideEngineFactory {
+
+ /**
+ * Create and return a new DIVIDE engine.
+ *
+ * @return newly created DIVIDE engine
+ */
+ public static synchronized IDivideEngine createInstance() {
+ return new DivideEngine();
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideOntology.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideOntology.java
new file mode 100644
index 0000000..3f36218
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideOntology.java
@@ -0,0 +1,33 @@
+package be.ugent.idlab.divide.core.engine;
+
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.reasoner.rulesys.Rule;
+
+import java.util.List;
+import java.util.UUID;
+
+public class DivideOntology {
+
+ private final String id;
+ private final Model model;
+ private final List<Rule> rules;
+
+ public DivideOntology(Model model, List<Rule> rules) {
+ this.id = UUID.randomUUID().toString();
+ this.model = model;
+ this.rules = rules;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public Model getModel() {
+ return model;
+ }
+
+ public List<Rule> getRules() {
+ return rules;
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideEngine.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideEngine.java
new file mode 100644
index 0000000..55a9ca1
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideEngine.java
@@ -0,0 +1,231 @@
+package be.ugent.idlab.divide.core.engine;
+
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+import be.ugent.idlab.divide.core.exception.DivideInitializationException;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser;
+import be.ugent.idlab.divide.rsp.RspQueryLanguage;
+import be.ugent.idlab.kb.IKnowledgeBase;
+import org.apache.jena.rdf.model.Model;
+
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Main engine of DIVIDE.
+ *
+ * Takes care of the handling of DIVIDE queries, and contains references to
+ * instances of {@link IDivideQueryDeriver} for the query derivation.
+ *
+ * It also is responsible for managing the components within the DIVIDE system.
+ * It keeps track of a collection of registered {@link IComponent} instances,
+ * and provides methods to register, unregister and retrieve them.
+ * It registers itself as observer to the {@link IKnowledgeBase} of
+ * this engine, to trigger the query derivation for the relevant components
+ * on context updates.
+ */
+public interface IDivideEngine {
+
+ /**
+ * Initializes the DIVIDE engine.
+ * Keeps track of the given {@link IDivideQueryDeriver} to be used for the
+ * query derivation when relevant knowledge base changes are observed.
+ * Changes to the knowledge base are only observed after this initialization
+ * method has successfully returned.
+ * Loads the ontology into the engine based on the given ontology model.
+ * Creates a DIVIDE component manager to manage the DIVIDE components of this
+ * engine, and observe changes of the given knowledge base.
+ *
+ * @param divideQueryDeriver {@link IDivideQueryDeriver} used for the query derivation
+ * performed by this engine
+ * @param knowledgeBase {@link IKnowledgeBase} that should be observed for changes
+ * to know when the query derivation should be triggered
+ * @param divideOntology model representing all statements in the ontology that is used
+ * by DIVIDE for the query derivation, i.e., in the TBox of the
+ * knowledge base
+ * @param pauseRspEngineStreamsOnContextChanges boolean representing whether RSP engine
+ * streams on a component should be paused
+ * when context changes are detected that
+ * trigger the DIVIDE query derivation for
+ * that component
+ * @param processUnmappedVariableMatchesInParser boolean representing whether variable
+ * matches in the input for the DIVIDE query
+ * parser that are not defined as mappings,
+ * should be considered as mappings by default
+ * @param validateUnboundVariablesInRspQlQueryBodyInParser boolean representing whether variables
+ * in the RSP-QL query body generated by
+ * the DIVIDE query parser, should be
+ * validated (= checked for occurrence in
+ * the WHERE clause of the query or in the
+ * set of input variables that will be
+ * substituted during the DIVIDE query
+ * derivation) during parsing
+ * @throws DivideInitializationException if something goes wrong during the initialization
+ * process, which prevents the DIVIDE engine from
+ * functioning as it should
+ * @throws DivideInvalidInputException when the specified ontology contains invalid
+ * statements, i.e., statements which cannot be loaded
+ * by the query deriver
+ */
+ void initialize(IDivideQueryDeriver divideQueryDeriver,
+ IKnowledgeBase knowledgeBase,
+ Model divideOntology,
+ boolean pauseRspEngineStreamsOnContextChanges,
+ boolean processUnmappedVariableMatchesInParser,
+ boolean validateUnboundVariablesInRspQlQueryBodyInParser)
+ throws DivideInitializationException, DivideInvalidInputException;
+
+ /**
+ * Register a new DIVIDE query to this DIVIDE engine.
+ * The required format and language of the input parameters depends on the type
+ * of query deriver used - if any of the input parameters is invalid according
+ * to this query deriver, a {@link DivideInvalidInputException} will be thrown.
+ *
+ * @param name name of the new DIVIDE query
+ * @param queryPattern generic RSP-QL query pattern of this query
+ * @param sensorQueryRule sensor query rule to be used for the query derivation
+ * @param goal goal to be used for the query derivation
+ * @param contextEnrichment definition of the context enrichment (i.e., the
+ * context-enriching queries) of the new DIVIDE query
+ * @return the newly created {@link IDivideQuery} that is registered to
+ * the DIVIDE engine (or null if a DIVIDE query with the given name
+ * is already registered to the engine)
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ * @throws DivideQueryDeriverException when something goes wrong when registering the new
+ * DIVIDE query to the {@link IDivideQueryDeriver} of this
+ * engine, which prevents it from performing the query
+ * derivation for this query - this error has nothing to
+ * do with invalid parameters
+ * @throws DivideInvalidInputException when the registration fails because any of the new
+ * DIVIDE query parameters is invalid
+ */
+ IDivideQuery addDivideQuery(String name,
+ String queryPattern,
+ String sensorQueryRule,
+ String goal,
+ ContextEnrichment contextEnrichment)
+ throws DivideNotInitializedException, DivideQueryDeriverException, DivideInvalidInputException;
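+
+ // Illustrative registration call (all argument values and variable names
+ // below are hypothetical):
+ //   IDivideQuery query = engine.addDivideQuery("exampleQuery",
+ //           rspQlQueryPattern, sensorQueryRule, goal, contextEnrichment);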
+
+ /**
+ * Removes an {@link IDivideQuery} with the given name from the list
+ * of queries registered to this DIVIDE engine.
+ *
+ * @param name name of query to remove from the DIVIDE engine (if no query
+ * with the given name is registered, nothing is done)
+ * @param unregisterQueries specifies whether all queries associated to this
+ * DIVIDE query should be unregistered on the components
+ * currently known by the system
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ */
+ void removeDivideQuery(String name,
+ boolean unregisterQueries) throws DivideNotInitializedException;
+
+ /**
+ * Retrieve list of {@link IDivideQuery} instances registered to this
+ * DIVIDE engine.
+ *
+ * @return list of DIVIDE queries registered to the engine
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ */
+ Collection<IDivideQuery> getDivideQueries() throws DivideNotInitializedException;
+
+ /**
+ * Retrieve {@link IDivideQuery} with the given name that is registered
+ * to this DIVIDE engine.
+ *
+ * @param name name of the DIVIDE query to retrieve
+ * @return the {@link IDivideQuery} registered to the DIVIDE engine with the
+ * given name (null if no query with that name is registered)
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ */
+ IDivideQuery getDivideQueryByName(String name) throws DivideNotInitializedException;
+
+ /**
+ * Creates and registers a new {@link IComponent}.
+ * After successful registration (no exception is thrown and no null is
+ * returned), this {@link IDivideEngine} performs the following task: when a
+ * change to any of the ABox IRIs specified in the contextIris list is
+ * observed, the query derivation is triggered for this registered component.
+ *
+ * @param contextIris IRIs of the ABoxes in a knowledge base that represents the
+ * context associated to the new {@link IComponent}
+ * @param rspQueryLanguage RSP query language used by the RSP engine running on
+ * the created component
+ * @param rspEngineUrl URL which should be used for communicating with the RSP engine
+ * running on the created component, and which will also be mapped
+ * to a unique ID for the created component
+ * @return the new {@link IComponent} that is registered (or null if a component
+ * is already registered with the specified rspEngineUrl)
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ * @throws DivideInvalidInputException if any of the specified context IRIs is
+ * invalid, OR if the rspEngineUrl is not
+ * a valid URL
+ */
+ IComponent registerComponent(List<String> contextIris,
+ RspQueryLanguage rspQueryLanguage,
+ String rspEngineUrl)
+ throws DivideNotInitializedException, DivideInvalidInputException;
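+
+ // Illustrative registration call (all values are hypothetical):
+ //   IComponent component = engine.registerComponent(
+ //           Collections.singletonList("http://example.org/context/patient1"),
+ //           rspQueryLanguage,              // language of the component's RSP engine
+ //           "http://10.10.0.1:9000/rsp");  // URL of the component's RSP engine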
+
+ /**
+ * Unregisters an {@link IComponent} with the given ID.
+ * After successful completion of this method, changes to the ABox IRIs in
+ * the knowledge base specified as context IRIs of this component no longer
+ * trigger the query derivation process for this component.
+ *
+ * @param id ID of the component to unregister (if no component with the given
+ * ID is registered, nothing is done)
+ * @param unregisterQueries specifies whether all queries registered by DIVIDE on
+ * the RSP engine of this component should be unregistered
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ */
+ void unregisterComponent(String id,
+ boolean unregisterQueries) throws DivideNotInitializedException;
+
+ /**
+ * Retrieve all {@link IComponent} instances registered to this engine.
+ *
+ * @return all registered {@link IComponent} instances
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ */
+ Collection<IComponent> getRegisteredComponents() throws DivideNotInitializedException;
+
+ /**
+ * Retrieve {@link IComponent} with the given ID that is registered
+ * to this engine.
+ *
+ * @param id ID of the {@link IComponent} to retrieve
+ * @return the {@link IComponent} registered to this engine with the
+ * given ID (null if no component with that ID is registered)
+ * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver,
+ * IKnowledgeBase, Model, boolean, boolean, boolean)}
+ * has not been called yet
+ */
+ IComponent getRegisteredComponentById(String id) throws DivideNotInitializedException;
+
+ /**
+ * Retrieve the {@link IDivideQueryParser} of this DIVIDE engine.
+ *
+ * @return the DIVIDE query parser of this DIVIDE engine
+ */
+ IDivideQueryParser getQueryParser();
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriver.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriver.java
new file mode 100644
index 0000000..ee9b3de
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriver.java
@@ -0,0 +1,128 @@
+package be.ugent.idlab.divide.core.engine;
+
+import be.ugent.idlab.divide.core.context.Context;
+import be.ugent.idlab.divide.core.exception.DivideInitializationException;
+import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
+import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
+import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser;
+import org.apache.jena.rdf.model.Model;
+
+/**
+ * Interface responsible for deriving the actual RSP-QL queries from an {@link IDivideQuery}.
+ */
+public interface IDivideQueryDeriver {
+
+ /**
+ * Loads the ontology that needs to be used as input (TBox) for each query
+ * derivation performed by this query deriver.
+ *
+ * If an ontology has been loaded successfully in the past at least once,
+ * this method will reload the ontology based on the new input. If something
+ * goes wrong during this reloading and an exception is thrown, the query
+ * deriver should still be in a valid state and continue working with the
+ * latest successfully loaded ontology.
+ *
+ * @param ontology representation of the ontology that should be used as TBox
+ * during the DIVIDE query derivation
+ * @throws DivideInvalidInputException when the ontology contains invalid statements, i.e.,
+ * statements which cannot be loaded by the query deriver
+ * @throws DivideInitializationException when something goes wrong during the loading of the
+ * ontology files, which prevents the instance from
+ * performing the query derivation
+ */
+ void loadOntology(Model ontology)
+ throws DivideInvalidInputException, DivideInitializationException;
+
+ /**
+ * Register a new {@link IDivideQuery} to this query deriver, to prepare
+ * the engine for deriving the RSP-QL queries from this DIVIDE query.
+ *
+ * @param divideQuery {@link IDivideQuery} to be registered to this query deriver
+ * (if null or if a DIVIDE query with the given name is already
+ * registered to the engine, nothing is changed)
+ * @param queryParser {@link IDivideQueryParser} to be used when the query deriver wants
+ * to parse the context-enriching queries of the DIVIDE query to possibly
+ * manipulate the context enrichment
+ * @throws DivideQueryDeriverException when something goes wrong during the registration
+ * of the new DIVIDE query, which prevents the instance
+ * from performing the query derivation for this query
+ * @throws DivideInvalidInputException when the given DIVIDE query has invalid fields
+ */
+ void registerQuery(IDivideQuery divideQuery,
+ IDivideQueryParser queryParser)
+ throws DivideQueryDeriverException, DivideInvalidInputException;
+
+ /**
+ * Unregister an {@link IDivideQuery} from this query deriver.
+ * In this way, this query deriver knows it will no longer need to derive
+ * RSP-QL queries from this DIVIDE query, which means it can clean up any
+ * resources related to this DIVIDE query.
+ *
+ * @param divideQuery {@link IDivideQuery} to be unregistered from this query deriver
+ * (if null or if no DIVIDE query with the given name is registered
+ * to the engine, nothing is changed)
+ */
+ void unregisterQuery(IDivideQuery divideQuery);
+
+ /**
+ * Performs the actual query derivation for the {@link IDivideQuery} with the given name,
+ * if such a DIVIDE query is registered to this query deriver.
+ * Runs the query derivation with the loaded ontology (TBox) and the passed context (ABox),
+ * outputting a query deriver result containing a list of RSP-QL queries that should be
+ * registered on the component with the passed ID given the new (passed) context.
+ *
+ * @param divideQueryName name of the {@link IDivideQuery} to be used for the query derivation
+ * (if no DIVIDE query with this name is registered, nothing is done and
+ * an empty list is returned)
+ * @param context new context for a certain component that should be used as input for
+ * the query derivation
+ * @param componentId ID of the component for which this query derivation is run
+ * @return a query deriver result, containing a method to retrieve a list of RSP-QL queries
+ * derived from the given DIVIDE query (can be of any length),
+ * which should be registered on the component with the passed ID
+ * @throws DivideQueryDeriverException when something goes wrong during the derivation of
+ * the RSP-QL queries
+ * @throws DivideNotInitializedException if {@link #loadOntology(Model)} has not been called yet
+ */
+ IDivideQueryDeriverResult deriveQueries(String divideQueryName,
+ Context context,
+ String componentId)
+ throws DivideQueryDeriverException, DivideNotInitializedException;
+
+ /**
+ * Substitutes new window parameters in a previous result of running the query derivation
+ * via the {@link #deriveQueries(String, Context, String)} method.
+ * These new window parameters can for example be imposed by a monitor component.
+ * This method does not perform the actual query derivation for the {@link IDivideQuery}
+ * with the given name, but redoes the final part of the query derivation where the
+ * window parameters for this query are substituted in the derived queries. The window
+ * parameters that should be used are passed to this method.
+ *
+ * @param divideQueryName name of the {@link IDivideQuery} to be used for the query derivation
+ * (if no DIVIDE query with this name is registered, nothing is done and
+ * an empty list is returned)
+ * @param windowParameters description of the new window parameters for the stream(s) defined in
+ * the RSP-QL query body pattern of the given DIVIDE query (if window
+ * parameter variables occur in the query pattern that are not redefined
+ * by the monitor, the statically defined window parameters will be used
+ * instead)
+ * TODO: ensure all window parameters are redefined, or rework
+ * this part so that the substitution first considers the
+ * dynamically defined window parameters
+ * @param componentId ID of the component for which this window parameter substitution is run
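+ * @param lastResult result of the last query derivation for the given DIVIDE query
+ * and component, into which the new window parameters should
+ * be substituted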
+ * @return a new query deriver result, containing a method to retrieve the list of updated
+ * RSP-QL queries with the new window parameters substituted into,
+ * which should be registered on the component with the passed ID
+ * @throws DivideQueryDeriverException when something goes wrong during the process of generating
+ * the new RSP-QL queries
+ * @throws DivideNotInitializedException if {@link #loadOntology(Model)} has not been called yet
+ */
+ IDivideQueryDeriverResult substituteWindowParameters(String divideQueryName,
+ Model windowParameters,
+ String componentId,
+ IDivideQueryDeriverResult lastResult)
+ throws DivideQueryDeriverException, DivideNotInitializedException;
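+
+ /*
+ * Typical lifecycle sketch (illustrative only; the ontology model, query
+ * objects, context and component ID are assumed to be obtained elsewhere):
+ *
+ * IDivideQueryDeriver deriver = ...; // engine-specific implementation
+ * deriver.loadOntology(ontologyModel); // load the TBox (Jena Model)
+ * deriver.registerQuery(divideQuery, queryParser);
+ * IDivideQueryDeriverResult result = deriver.deriveQueries(
+ * divideQuery.getName(), context, componentId);
+ * for (String rspQlQuery : result.getSubstitutedRspQlQueries()) {
+ * // schedule rspQlQuery for registration on the component's RSP engine
+ * }
+ */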
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriverResult.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriverResult.java
new file mode 100644
index 0000000..7ca14a5
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriverResult.java
@@ -0,0 +1,13 @@
+package be.ugent.idlab.divide.core.engine;
+
+import java.util.List;
+
+public interface IDivideQueryDeriverResult {
+
+ /**
+ * @return a list of substituted RSP-QL queries being the result of performing
+ * the DIVIDE query derivation and/or window parameter substitution
+ */
+ List<String> getSubstitutedRspQlQueries();
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryUpdateTask.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryUpdateTask.java
new file mode 100644
index 0000000..c6d5cd5
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryUpdateTask.java
@@ -0,0 +1,13 @@
+package be.ugent.idlab.divide.core.engine;
+
+public interface IDivideQueryUpdateTask {
+
+ /**
+ * Execute this query update task.
+ *
+ * @return true if the update thread on which this task is running was
+ * interrupted during the execution of this task, false otherwise
+ */
+ boolean execute();
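+
+ /*
+ * Since this is a single-method interface, a task can be written as a
+ * lambda (hypothetical sketch):
+ *
+ * IDivideQueryUpdateTask task = () -> {
+ * // ... perform the query update ...
+ * return Thread.currentThread().isInterrupted();
+ * };
+ */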
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleContextEnricherUpdater.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleContextEnricherUpdater.java
new file mode 100644
index 0000000..3506486
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleContextEnricherUpdater.java
@@ -0,0 +1,124 @@
+package be.ugent.idlab.divide.core.engine;
+
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.context.ContextEnricherFactory;
+import be.ugent.idlab.divide.core.context.IContextEnricher;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Runnable that updates the {@link IContextEnricher} associated to a given
+ * DIVIDE {@link IComponent} and {@link IDivideQuery}: it creates the context
+ * enricher if it does not exist yet, and registers the given
+ * {@link DivideOntology} on it.
+ *
+ * This task can be executed in parallel with the same task for the other DIVIDE
+ * queries registered at the same component (if it is also required for these queries).
+ */
+class SingleContextEnricherUpdater implements Runnable {
+
+ private static final Logger LOGGER =
+ LoggerFactory.getLogger(SingleContextEnricherUpdater.class.getName());
+
+ private final IComponent component;
+ private final IDivideQuery divideQuery;
+ private final DivideOntology divideOntology;
+ private final CountDownLatch latch;
+
+ /**
+ * Creates a runnable of which the {@link #run()} method should ensure that
+ * a {@link IContextEnricher} is associated to the given {@link IComponent} and
+ * {@link IDivideQuery}, and that this context enricher has the given
+ * {@link DivideOntology} registered.
+ * This means that this runnable will create a context enricher if no context
+ * enricher is associated yet to the given pair of {@link IComponent} and
+ * {@link IDivideQuery}, and that the given {@link DivideOntology} is registered
+ * if no or another ontology is currently registered to this context enricher.
+ *
+ * When the task of this runnable is finished, i.e., at the end of the
+ * {@link #run()} method, the given {@link CountDownLatch} should be decremented.
+ *
+ * @param component component of which this runnable should check the associated
+ * context enricher
+ * @param divideQuery DIVIDE query of which this runnable should check the
+ * associated context enricher
+ * @param divideOntology DIVIDE ontology that should be registered at the context
+ * enricher associated to the given component & DIVIDE query
+ * @param latch latch to be decremented when this runnable finishes its job
+ */
+ SingleContextEnricherUpdater(IComponent component,
+ IDivideQuery divideQuery,
+ DivideOntology divideOntology,
+ CountDownLatch latch) {
+ this.component = component;
+ this.divideQuery = divideQuery;
+ this.divideOntology = divideOntology;
+ this.latch = latch;
+ }
+
+ /**
+ * Creates a runnable of which the {@link #run()} method should ensure that
+ * a {@link IContextEnricher} is associated to the given {@link IComponent} and
+ * {@link IDivideQuery}, and that this context enricher has the given
+ * {@link DivideOntology} registered.
+ * This means that this runnable will create a context enricher if no context
+ * enricher is associated yet to the given pair of {@link IComponent} and
+ * {@link IDivideQuery}, and that the given {@link DivideOntology} is registered
+ * if no or another ontology is currently registered to this context enricher.
+ *
+ * @param component component of which this runnable should check the associated
+ * context enricher
+ * @param divideQuery DIVIDE query of which this runnable should check the
+ * associated context enricher
+ * @param divideOntology DIVIDE ontology that should be registered at the context
+ * enricher associated to the given component & DIVIDE query
+ */
+ SingleContextEnricherUpdater(IComponent component,
+ IDivideQuery divideQuery,
+ DivideOntology divideOntology) {
+ this.component = component;
+ this.divideQuery = divideQuery;
+ this.divideOntology = divideOntology;
+ this.latch = null;
+ }
+
+ @Override
+ public void run() {
+ LOGGER.info("Updating context enricher for component with ID '{}' and DIVIDE " +
+ "query with name '{}', and DIVIDE ontology with ID '{}'",
+ component.getId(), divideQuery.getName(), divideOntology.getId());
+
+ // check if a context enricher is already registered for the given
+ // combination of DIVIDE component and DIVIDE query
+ IContextEnricher contextEnricher = component.getContextEnricher(divideQuery);
+
+ // -> if not, action should be taken
+ if (contextEnricher == null) {
+ LOGGER.info("Creating context enricher for component with ID '{}' and DIVIDE " +
+ "query with name '{}' (none exists yet)",
+ component.getId(), divideQuery.getName());
+
+ // first create a new context enricher
+ contextEnricher = ContextEnricherFactory.createInstance(
+ divideQuery.getContextEnrichment(),
+ component.getId());
+
+ // register the context enricher for the DIVIDE query to the component
+ component.registerContextEnricher(divideQuery, contextEnricher);
+ }
+
+ // register the ontology triples & rules to the context enricher
+ // -> if needed, an inference model can be built in parallel
+ contextEnricher.registerOntology(divideOntology);
+
+ // if a latch is specified, count it down to let the calling thread
+ // know that this updating task has finished
+ if (latch != null) {
+ latch.countDown();
+ }
+ }
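+
+ /*
+ * Illustrative fan-out sketch (hypothetical caller code; the executor and
+ * the collection of registered DIVIDE queries are assumed to exist):
+ *
+ * CountDownLatch latch = new CountDownLatch(divideQueries.size());
+ * for (IDivideQuery query : divideQueries) {
+ * executor.submit(new SingleContextEnricherUpdater(
+ * component, query, ontology, latch));
+ * }
+ * latch.await(); // continue once all context enrichers are up to date
+ */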
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleQueryDeriver.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleQueryDeriver.java
new file mode 100644
index 0000000..db3fe55
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleQueryDeriver.java
@@ -0,0 +1,124 @@
+package be.ugent.idlab.divide.core.engine;
+
+import be.ugent.idlab.divide.core.component.IComponent;
+import be.ugent.idlab.divide.core.context.Context;
+import be.ugent.idlab.divide.core.context.IContextEnricher;
+import be.ugent.idlab.divide.core.query.IDivideQuery;
+import be.ugent.idlab.divide.util.LogConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * Runnable which has the task of performing the derivation of a single
+ * {@link IDivideQuery} on the context associated to a certain {@link IComponent}.
+ *
+ * This task can be executed in parallel with the same task for the other DIVIDE
+ * queries registered at the same component (if it is also required for these queries).
+ */
+class SingleQueryDeriver implements Runnable {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(SingleQueryDeriver.class.getName());
+
+ private final IDivideQuery divideQuery;
+ private final Context context;
+ private final IComponent component;
+ private final IDivideQueryDeriver divideQueryDeriver;
+ private final DivideOntology divideOntology;
+ private final CountDownLatch latch;
+
+ SingleQueryDeriver(IDivideQuery divideQuery,
+ Context context,
+ IComponent component,
+ IDivideQueryDeriver divideQueryDeriver,
+ DivideOntology divideOntology,
+ CountDownLatch latch) {
+ this.divideQuery = divideQuery;
+ this.context = context;
+ this.component = component;
+ this.divideQueryDeriver = divideQueryDeriver;
+ this.divideOntology = divideOntology;
+ this.latch = latch;
+ }
+
+ @Override
+ public void run() {
+ long start = System.currentTimeMillis();
+
+ LOGGER.info("Running DIVIDE query derivation for query '{}' (for component with ID '{}'," +
+ "and context '{}'))",
+ divideQuery.getName(), component.getId(), context.getId());
+
+ try {
+ // run context enricher updater runnable in this thread to ensure
+ // that a context enricher exists for the given combination of DIVIDE
+ // component and DIVIDE query, and that the given ontology is registered
+ // to the context enricher
+ SingleContextEnricherUpdater contextEnricherUpdater =
+ new SingleContextEnricherUpdater(
+ component, divideQuery, divideOntology);
+ contextEnricherUpdater.run();
+
+ // copy context for this DIVIDE query (to avoid overlap)
+ Context copiedContext = context.copy();
+ LOGGER.info("Running DIVIDE query derivation for query '{}' (for component with ID '{}'): " +
+ "copy context '{}' to new context '{}'",
+ divideQuery.getName(), component.getId(), context.getId(), copiedContext.getId());
+
+ // then first enrich the context with the context enricher registered
+ // at the given DIVIDE component for the given DIVIDE query
+ IContextEnricher contextEnricher = component.getContextEnricher(divideQuery);
+ contextEnricher.enrichContext(copiedContext);
+
+ // derive all query instances for the given DIVIDE query name and up-to-date context
+ // -> what about the exceptions?
+ // * DivideNotInitializedException is impossible: this is only called if an IRI
+ // for a specific component is updated, and components cannot be registered
+ // to the DIVIDE engine if it has not been initialized
+ // * DivideQueryDeriverException: is possible if issues occur in the EYE reasoning
+ // scripts; real EYE errors are unlikely since all input is valid by definition
+ // (is either query fields which are validated upon registration of a query,
+ // controlled static inputs of DIVIDE which are known to be valid, or outputs of
+ // previous reasoning steps); I/O errors can of course never be ruled out
+ // * other possible unchecked exceptions: always possible
+ // -> any exception should ALWAYS be caught and ignored, since otherwise the query
+ // update processing queue of this component will block FOREVER (since it is
+ // waiting for each started thread, including this one, to count down the latch)
+ // => whatever the exception is, this thread should simply stop and count down
+ // the latch, without having scheduled any queries for registration at the
+ // RSP engine handler
+ IDivideQueryDeriverResult divideQueryDeriverResult = divideQueryDeriver.deriveQueries(
+ divideQuery.getName(), copiedContext, component.getId());
+ List<String> substitutedQueries = divideQueryDeriverResult.getSubstitutedRspQlQueries();
+
+ // schedule each new query for registration
+ for (String query : substitutedQueries) {
+ component.getRspEngineHandler().scheduleForRegistration(query, divideQuery);
+ }
+
+ LOGGER.info("Finished DIVIDE query derivation for query '{}' in {} milliseconds" +
+ " (for component with ID '{}', and context '{}')",
+ divideQuery.getName(), System.currentTimeMillis() - start,
+ component.getId(), copiedContext.getId());
+
+ } catch (Exception e) {
+ LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
+ "Error during the DIVIDE query derivation for query '{}' " +
+ "(for component with ID '{}', and context '{}')",
+ divideQuery.getName(), component.getId(), context.getId(), e);
+ // TODO: 29/01/2021 do something with fact that not all required queries
+ // might be running? (I think an important part of the monitor will be monitoring
+ // for errors and send these errors on the monitoring stream so that depending on
+ // the use case, action can be taken when such an event occurs)
+
+ } finally {
+ // whatever happens along the way, count down latch at the end so
+ // the main query derivation thread (in DivideEngine class) is not
+ // blocked forever
+ latch.countDown();
+ }
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideException.java
new file mode 100644
index 0000000..5f89287
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideException.java
@@ -0,0 +1,20 @@
+package be.ugent.idlab.divide.core.exception;
+
+/**
+ * General exception describing known DIVIDE errors.
+ */
+public abstract class DivideException extends Exception {
+
+ public DivideException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public DivideException(String description) {
+ super(description);
+ }
+
+ public DivideException(Exception base) {
+ super(base);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInitializationException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInitializationException.java
new file mode 100644
index 0000000..9201f25
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInitializationException.java
@@ -0,0 +1,19 @@
+package be.ugent.idlab.divide.core.exception;
+
+/**
+ * Exception thrown when an error occurs during the initialization of a
+ * DIVIDE object, which causes the object to not be correctly initialized,
+ * and therefore prevents this object from functioning as it should.
+ */
+@SuppressWarnings("unused")
+public class DivideInitializationException extends DivideException {
+
+ public DivideInitializationException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public DivideInitializationException(String description) {
+ super(description);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInvalidInputException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInvalidInputException.java
new file mode 100644
index 0000000..92cb5b2
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInvalidInputException.java
@@ -0,0 +1,18 @@
+package be.ugent.idlab.divide.core.exception;
+
+/**
+ * Exception thrown when an error occurs because the input provided to a DIVIDE
+ * object (from the outside) is invalid.
+ */
+@SuppressWarnings("unused")
+public class DivideInvalidInputException extends DivideException {
+
+ public DivideInvalidInputException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public DivideInvalidInputException(String description) {
+ super(description);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideNotInitializedException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideNotInitializedException.java
new file mode 100644
index 0000000..0b3ceb4
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideNotInitializedException.java
@@ -0,0 +1,27 @@
+package be.ugent.idlab.divide.core.exception;
+
+/**
+ * Exception thrown when a method of a DIVIDE object is called before this
+ * object has been initialized, while initialization is required for this
+ * method to be called.
+ */
+@SuppressWarnings("unused")
+public class DivideNotInitializedException extends DivideException {
+
+ public DivideNotInitializedException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public DivideNotInitializedException(String description) {
+ super(description);
+ }
+
+ public DivideNotInitializedException(Exception base) {
+ super("DIVIDE engine has not been initialized", base);
+ }
+
+ public DivideNotInitializedException() {
+ super("DIVIDE engine has not been initialized");
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideQueryDeriverException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideQueryDeriverException.java
new file mode 100644
index 0000000..ad212df
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideQueryDeriverException.java
@@ -0,0 +1,24 @@
+package be.ugent.idlab.divide.core.exception;
+
+/**
+ * Exception thrown when an error occurs related to the query derivation process
+ * of DIVIDE. This can be during the query derivation, but also during the
+ * registration of DIVIDE queries in preparation of the query derivation for
+ * these DIVIDE queries.
+ */
+@SuppressWarnings("unused")
+public class DivideQueryDeriverException extends DivideException {
+
+ public DivideQueryDeriverException(String description, Exception base) {
+ super(description, base);
+ }
+
+ public DivideQueryDeriverException(String description) {
+ super(description);
+ }
+
+ public DivideQueryDeriverException(Exception base) {
+ super(base);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQuery.java
new file mode 100644
index 0000000..b6044a2
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQuery.java
@@ -0,0 +1,74 @@
+package be.ugent.idlab.divide.core.query;
+
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+
+import java.util.Objects;
+
+class DivideQuery implements IDivideQuery {
+
+ private final String name;
+ private final String queryPattern;
+ private final String sensorQueryRule;
+ private final String goal;
+ private ContextEnrichment contextEnrichment;
+
+ DivideQuery(String name,
+ String queryPattern,
+ String sensorQueryRule,
+ String goal,
+ ContextEnrichment contextEnrichment) {
+ this.name = name;
+ this.queryPattern = queryPattern;
+ this.sensorQueryRule = sensorQueryRule;
+ this.goal = goal;
+ this.contextEnrichment = contextEnrichment;
+ }
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+ @Override
+ public String getQueryPattern() {
+ return queryPattern;
+ }
+
+ @Override
+ public String getSensorQueryRule() {
+ return sensorQueryRule;
+ }
+
+ @Override
+ public String getGoal() {
+ return goal;
+ }
+
+ @Override
+ public ContextEnrichment getContextEnrichment() {
+ return contextEnrichment;
+ }
+
+ @Override
+ public void removeContextEnrichment() {
+ this.contextEnrichment = new ContextEnrichment();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ DivideQuery that = (DivideQuery) o;
+ return name.equals(that.name);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name);
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQueryFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQueryFactory.java
new file mode 100644
index 0000000..8554396
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQueryFactory.java
@@ -0,0 +1,27 @@
+package be.ugent.idlab.divide.core.query;
+
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+
+public class DivideQueryFactory {
+
+ /**
+ * Create a new DIVIDE query with the given parameters.
+ *
+ * @param queryName name of the DIVIDE query
+ * @param queryPattern generic query pattern used during query derivation
+ * @param sensorQueryRule sensor query rule used during query derivation
+ * @param goal goal used during query derivation
+ * @param contextEnrichment the context enrichment to be used at the start
+ * of the query derivation
+ * @return created DIVIDE query
+ */
+ public static IDivideQuery createInstance(String queryName,
+ String queryPattern,
+ String sensorQueryRule,
+ String goal,
+ ContextEnrichment contextEnrichment) {
+ return new DivideQuery(
+ queryName, queryPattern, sensorQueryRule, goal, contextEnrichment);
+ }
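+
+ /*
+ * Illustrative usage (hypothetical name; in practice the pattern, rule and
+ * goal strings are produced by the DIVIDE query parser):
+ *
+ * IDivideQuery query = DivideQueryFactory.createInstance(
+ * "heart-rate-monitoring",
+ * queryPattern, sensorQueryRule, goal,
+ * new ContextEnrichment()); // empty context enrichment
+ */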
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/IDivideQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/IDivideQuery.java
new file mode 100644
index 0000000..f666cdb
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/IDivideQuery.java
@@ -0,0 +1,25 @@
+package be.ugent.idlab.divide.core.query;
+
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+
+/**
+ * Representation of a generic query within DIVIDE, of which specific initialized
+ * query instances can be derived.
+ * It has a name, a query pattern (which needs to be substituted),
+ * a sensor query rule used for the query derivation, and a goal used for the query derivation.
+ */
+public interface IDivideQuery {
+
+ String getName();
+
+ String getQueryPattern();
+
+ String getSensorQueryRule();
+
+ String getGoal();
+
+ ContextEnrichment getContextEnrichment();
+
+ void removeContextEnrichment();
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/CleanDivideQueryParserInput.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/CleanDivideQueryParserInput.java
new file mode 100644
index 0000000..b612ce4
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/CleanDivideQueryParserInput.java
@@ -0,0 +1,71 @@
+package be.ugent.idlab.divide.core.query.parser;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class CleanDivideQueryParserInput extends DivideQueryParserInput {
+
+ private final Map<String, String> variableMapping;
+ private final Map<String, String> reverseVariableMapping;
+ private Set<String> unboundVariables;
+ private Map<String, String> finalQueryVariableMapping;
+
+ CleanDivideQueryParserInput(InputQueryLanguage inputQueryLanguage,
+ List<StreamWindow> streamWindows,
+ String streamQuery,
+ List<String> intermediateQueries,
+ String finalQuery,
+ String solutionModifier,
+ Map<String, String> variableMapping) {
+ super(inputQueryLanguage, streamWindows, streamQuery, intermediateQueries,
+ finalQuery, solutionModifier, null);
+
+ this.variableMapping = variableMapping;
+ this.reverseVariableMapping = new HashMap<>();
+ this.variableMapping.forEach((k, v) -> this.reverseVariableMapping.put(v, k));
+
+ this.finalQueryVariableMapping = new HashMap<>();
+ }
+
+ CleanDivideQueryParserInput(DivideQueryParserInput input) {
+ super(input.getInputQueryLanguage(),
+ input.getStreamWindows(),
+ input.getStreamQuery(),
+ input.getIntermediateQueries(),
+ input.getFinalQuery(),
+ input.getSolutionModifier(),
+ input.getStreamToFinalQueryVariableMapping());
+
+ this.variableMapping = new HashMap<>();
+ this.reverseVariableMapping = new HashMap<>();
+
+ this.finalQueryVariableMapping = new HashMap<>();
+ }
+
+ Map<String, String> getVariableMapping() {
+ return variableMapping;
+ }
+
+ public Map<String, String> getReverseVariableMapping() {
+ return reverseVariableMapping;
+ }
+
+ void setUnboundVariables(Set<String> unboundVariables) {
+ this.unboundVariables = unboundVariables;
+ }
+
+ Set<String> getUnboundVariables() {
+ return unboundVariables;
+ }
+
+ public void setFinalQueryVariableMapping(Map<String, String> finalQueryVariableMapping) {
+ this.finalQueryVariableMapping = finalQueryVariableMapping;
+ }
+
+ public Map<String, String> getFinalQueryVariableMapping() {
+ return finalQueryVariableMapping;
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ConvertedStreamWindow.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ConvertedStreamWindow.java
new file mode 100644
index 0000000..9ae9a3b
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ConvertedStreamWindow.java
@@ -0,0 +1,31 @@
+package be.ugent.idlab.divide.core.query.parser;
+
+import java.util.List;
+
+public class ConvertedStreamWindow extends StreamWindow {
+
+ private final List<WindowParameter> windowParameters;
+
+ public ConvertedStreamWindow(String streamIri,
+ String windowDefinition,
+ List<WindowParameter> windowParameters) {
+ super(streamIri, windowDefinition);
+
+ this.windowParameters = windowParameters;
+ }
+
+ public List<WindowParameter> getWindowParameters() {
+ return windowParameters;
+ }
+
+ @Override
+ public String toString() {
+ return "ConvertedStreamWindow{" +
+ "windowParameters=" + windowParameters +
+ ", streamIri='" + streamIri + '\'' +
+ ", windowDefinition='" + windowDefinition + '\'' +
+ ", defaultWindowParameterValues=" + defaultWindowParameterValues +
+ '}';
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryGenerator.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryGenerator.java
new file mode 100644
index 0000000..a3a8367
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryGenerator.java
@@ -0,0 +1,504 @@
+package be.ugent.idlab.divide.core.query.parser;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+class DivideQueryGenerator {
+
+ static long COUNTER = 1;
+
+ private static final Set<Prefix> DIVIDE_PREFIXES = new HashSet<>();
+ private static final Set<String> DIVIDE_PREFIX_NAMES;
+ private static final Map<String, Prefix> DIVIDE_PREFIX_MAP;
+
+ static {
+ DIVIDE_PREFIXES.add(new Prefix(":", ""));
+ DIVIDE_PREFIXES.add(new Prefix("sd:", ""));
+ DIVIDE_PREFIXES.add(new Prefix("sh:", "<http://www.w3.org/ns/shacl#>"));
+ DIVIDE_PREFIXES.add(new Prefix("owl:", "<http://www.w3.org/2002/07/owl#>"));
+ DIVIDE_PREFIXES.add(new Prefix("rdf:", "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>"));
+ DIVIDE_PREFIXES.add(new Prefix("xsd:", "<http://www.w3.org/2001/XMLSchema#>"));
+
+ DIVIDE_PREFIX_MAP = new HashMap<>();
+ for (Prefix dividePrefix : DIVIDE_PREFIXES) {
+ DIVIDE_PREFIX_MAP.put(dividePrefix.getName(), dividePrefix);
+ }
+
+ DIVIDE_PREFIX_NAMES = DIVIDE_PREFIX_MAP.keySet();
+ }
+
+ private static final String TURTLE_PREFIX_TEMPLATE = "@prefix %s %s .";
+
+ private static final String SHACL_PREFIX_DECLARATION_TEMPLATE =
+ ":prefixes-%d sh:declare [ sh:prefix \"%s\" ; sh:namespace \"%s\"^^xsd:anyURI ] .";
+
+ private static final String QUERY_PATTERN_TEMPLATE =
+ "%s\n" + ":prefixes-%d rdf:type owl:Ontology .\n%s\n" +
+ ":pattern rdf:type sd:QueryPattern ; " +
+ "sh:prefixes :prefixes-%d ; sh:%s \"\"\"%s\"\"\".";
+
+ private static final String SENSOR_QUERY_RULE_TEMPLATE =
+ "%s\n" +
+ "{\n%s\n}\n=>\n{\n" +
+ "_:q rdf:type sd:Query ;\n" +
+ " sd:pattern :pattern ;\n" +
+ " sd:inputVariables (%s) ;\n" +
+ " sd:windowParameters (%s) ;\n" +
+ " sd:outputVariables (%s) .\n" +
+ "\n%s\n} .";
+
+ private static final String SENSOR_QUERY_RULE_ADDITIONAL_RULE_TEMPLATE =
+ "{\n%s\n}\n=>\n{\n%s\n} .";
+
+ private static final String SENSOR_QUERY_RULE_INPUT_OUTPUT_VARIABLE_TEMPLATE = "(\"%s\" %s)";
+
+ private static final String SENSOR_QUERY_RULE_WINDOW_PARAMETER_TEMPLATE = "(\"%s\" %s %s)";
+
+ private static final Map<WindowParameter.WindowParameterType, String>
+ windowParameterTypeMapping = new HashMap<>();
+ static {
+ windowParameterTypeMapping.put(WindowParameter.WindowParameterType.XSD_DURATION,
+ "");
+ windowParameterTypeMapping.put(WindowParameter.WindowParameterType.TIME_SECONDS,
+ "");
+ windowParameterTypeMapping.put(WindowParameter.WindowParameterType.TIME_MINUTES,
+ "");
+ windowParameterTypeMapping.put(WindowParameter.WindowParameterType.TIME_HOURS,
+ "");
+ }
+
+ private static final String GOAL_TEMPLATE = "%s\n{\n%s\n}\n=>\n{\n%s\n} .";
+
+ private static final String RSP_QL_QUERY_BODY_TEMPLATE = "%s\n%s\n%s\nWHERE {\n%s\n}\n%s";
+
+ private static final String RSP_QL_QUERY_BODY_FROM_TEMPLATE =
+ "FROM NAMED WINDOW :win%d ON %s [%s]";
+
+ private static final String RSP_QL_QUERY_BODY_WHERE_GRAPH_TEMPLATE = "WINDOW :win%d {\n%s\n}";
+
+ /**
+ * @param queryForm query form of the RSP-QL query template for which
+ * this pattern is created
+ * @param prefixes set of prefixes used in the RSP-QL query body
+ * @param rspQlQueryBody RSP-QL query body of the query template for which
+ * this pattern is created; this should be the output
+ * of the {@link #createRspQlQueryBody(QueryForm, String,
+ * List, String, List, DivideQueryParser)} method
+ *
+ * @return query pattern of the DIVIDE query
+ */
+ String createQueryPattern(QueryForm queryForm,
+ Set<Prefix> prefixes,
+ String rspQlQueryBody) {
+ Set<Prefix> dividePrefixes = new HashSet<>(DIVIDE_PREFIXES);
+
+ // loop over prefixes
+ Set<Prefix> prefixesPresent = new HashSet<>();
+ for (Prefix prefix : prefixes) {
+ if (Pattern.compile("(\\s|\\(|^|\\^)" + prefix.getName() + "(?!win[0-9]+\\s)")
+ .matcher(rspQlQueryBody).find()) {
+ if (":".equals(prefix.getName())) {
+ // a prefix without a name cannot be defined in SHACL, so should
+ // be replaced with a DIVIDE prefix
+ Prefix newPrefix = new Prefix(
+ String.format("divide-%s:", UUID.randomUUID()),
+ prefix.getUri());
+
+ // update prefix set
+ prefixesPresent.add(newPrefix);
+
+ // update RSP-QL query body according to new prefix
+ Pattern replacingPattern = Pattern.compile("(\\s|\\(|^|\\^):(?!win[0-9]+\\s)");
+ Matcher m = replacingPattern.matcher(rspQlQueryBody);
+ rspQlQueryBody = m.replaceAll("$1" + newPrefix.getName());
+
+ } else {
+ // only include in prefix set if prefix occurs in RSP-QL query body
+ prefixesPresent.add(prefix);
+ }
+ }
+ }
+
+ // update DIVIDE prefixes and template if prefix conflicts exist
+ List<String> templates = new ArrayList<>();
+ templates.add(QUERY_PATTERN_TEMPLATE);
+ templates.add(SHACL_PREFIX_DECLARATION_TEMPLATE);
+ templates = solveConflictsWithDividePrefixes(templates, prefixesPresent, dividePrefixes);
+
+ return String.format(templates.get(0),
+ getTurtlePrefixList(dividePrefixes),
+ COUNTER,
+ getShaclPrefixList(prefixesPresent, templates.get(1)),
+ COUNTER,
+ queryForm.toString().toLowerCase(),
+ rspQlQueryBody);
+ }
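+
+ /*
+ * For illustration, instantiating QUERY_PATTERN_TEMPLATE for a SELECT
+ * query with one user-defined prefix "ex" roughly yields:
+ *
+ * @prefix rdf: <...> . @prefix owl: <...> . ... (DIVIDE prefixes)
+ * :prefixes-1 rdf:type owl:Ontology .
+ * :prefixes-1 sh:declare [ sh:prefix "ex" ; sh:namespace "..."^^xsd:anyURI ] .
+ * :pattern rdf:type sd:QueryPattern ;
+ * sh:prefixes :prefixes-1 ; sh:select """...RSP-QL query body...""".
+ */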
+
+ /**
+ * @param prefixes set of prefixes used in the sensor query rule content
+ * (both context part and stream query result, and in possible
+ * additional queries)
+ * @param contextPart context part of stream query which is used as antecedent
+ * of the sensor query rule
+ * @param streamQueryResult stream query result which is part of the consequence
+ * of the sensor query rule
+ * @param inputVariables input variables from the antecedent that need to be substituted
+ * into the consequence (including pattern) and therefore need to
+ * be defined as input variables in this sensor query rule
+ * @param outputVariables output variables in the stream query result (i.e., variables
+ * not occurring in the antecedent) that therefore need to be
+ * substituted into blank nodes in the consequence of the created
+ * sensor query rule
+ * @param additionalQueries parsed additional SPARQL queries that are executed
+ * between the first stream-dependent query and the final
+ * query yielding the query result
+ *
+ * @return sensor query rule for the DIVIDE query, extended with an additional
+ * rule for each additional query (if any)
+ */
+ String createSensorQueryRule(Set<Prefix> prefixes,
+ String contextPart,
+ String streamQueryResult,
+ List<String> inputVariables,
+ List<WindowParameter> windowParameters,
+ List<String> outputVariables,
+ List<ParsedSparqlQuery> additionalQueries) {
+ Set<Prefix> dividePrefixes = new HashSet<>(DIVIDE_PREFIXES);
+
+ // update DIVIDE prefixes and template if prefix conflicts exist
+ List<String> templates = Collections.singletonList(SENSOR_QUERY_RULE_TEMPLATE);
+ templates = solveConflictsWithDividePrefixes(templates, prefixes, dividePrefixes);
+
+ // merge all prefixes
+ // (merging can happen without any issues since the documentation
+ // mentions that this method expects no overlap between the prefix
+ // sets)
+ Set<Prefix> allPrefixes = new HashSet<>(dividePrefixes);
+ allPrefixes.addAll(prefixes);
+
+ // generate string of input variables
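+ // (the sort below puts variables whose names contain another variable's
+ // name first; presumably this prevents a later textual substitution of a
+ // shorter variable from also matching inside a longer variable name)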
+ String inputVariablesString = inputVariables
+ .stream()
+ .sorted((s1, s2) -> s1.contains(s2) ?
+ (s1.equals(s2) ? 0 : -1) :
+ (s2.contains(s1) ? 1 : s1.compareTo(s2)))
+ .map(s -> String.format(SENSOR_QUERY_RULE_INPUT_OUTPUT_VARIABLE_TEMPLATE, s, s))
+ .collect(Collectors.joining(" "));
+
+ // generate string of window variables
+ String windowVariablesString = windowParameters
+ .stream()
+ .map(s -> String.format(SENSOR_QUERY_RULE_WINDOW_PARAMETER_TEMPLATE,
+ s.getVariable(),
+ !s.isValueSubstitutionVariable() &&
+ s.getType() == WindowParameter.WindowParameterType.XSD_DURATION ?
+ "\"" + s.getValue() + "\"" : s.getValue(),
+ windowParameterTypeMapping.get(s.getType())))
+ .collect(Collectors.joining(" "));
+
+ // process all output variables
+ List<String> outputVariablesList = new ArrayList<>();
+ for (String outputVariable : outputVariables) {
+ // create blank node for each output variable
+ String blank = outputVariable.replaceFirst(Pattern.quote("?"), "_:");
+
+ // generate string to add to list output variables
+ outputVariablesList.add(String.format(
+ SENSOR_QUERY_RULE_INPUT_OUTPUT_VARIABLE_TEMPLATE,
+ outputVariable, blank));
+
+ // replace output variable by its blank node in the stream query result,
+ // which ends up in the consequence of the sensor query rule
+ streamQueryResult = streamQueryResult.replaceAll(
+ Pattern.quote(outputVariable), blank);
+ }
+ String outputVariablesString = String.join(" ", outputVariablesList);
+
+ // create sensor query rule string
+ String sensorQueryRule = String.format(templates.get(0),
+ getTurtlePrefixList(allPrefixes),
+ contextPart,
+ inputVariablesString,
+ windowVariablesString,
+ outputVariablesString,
+ streamQueryResult);
+
+ // create additional rule string for each additional query
+ // (WHERE clause as antecedent, CONSTRUCT clause as consequence)
+ List<String> additionalRules = new ArrayList<>();
+ for (ParsedSparqlQuery additionalQuery : additionalQueries) {
+ additionalRules.add(String.format(SENSOR_QUERY_RULE_ADDITIONAL_RULE_TEMPLATE,
+ additionalQuery.getSplitSparqlQuery().getWherePart(),
+ additionalQuery.getSplitSparqlQuery().getResultPart()));
+ }
+
+ // create the actual sensor query rule and append the additional rules to it
+ return String.format("%s\n\n%s",
+ sensorQueryRule,
+ String.join("\n\n", additionalRules));
+ }
+
+ /**
+ * @param prefixes set of prefixes used in the goal's antecedent and consequence
+ * @param antecedent antecedent of the rule that makes up the goal
+ * @param consequence consequence of the rule that makes up the goal
+ *
+ * @return goal for the DIVIDE query
+ */
+ String createGoal(Set<Prefix> prefixes,
+ String antecedent,
+ String consequence) {
+ String prefixString = String.join(" ", getTurtlePrefixList(prefixes));
+ return String.format(GOAL_TEMPLATE,
+ prefixString,
+ antecedent,
+ consequence);
+ }
+
+ /**
+ * @param queryForm form of the last query in the chain of input queries, that also
+ * needs to be used as form in the RSP-QL query body for the DIVIDE
+ * query (can either be CONSTRUCT, SELECT or ASK)
+ * @param queryOutput output of the last query in the chain of input queries, that
+ * also needs to be the output of the RSP-QL query body for the
+ * DIVIDE query
+ * @param whereClauseItems ordered list of WHERE clause items that are either graphs
+ * clauses on a stream IRI or SPARQL expressions; this list
+ * will be processed to generate the WHERE clause for the
+ * created RSP-QL query body; this list should contain at
+ * least 1 graph clause on a stream IRI
+ * @param solutionModifier solution modifier of the resulting RSP-QL query as defined
+ * in the input
+ * @param streamWindows stream windows defined in the parser input, which should contain
+ * an entry for each stream IRI specified in the graph WHERE clause
+ * items (together with the window parameters for this stream IRI)
+ *
+ * @return the RSP-QL query body to be used in the query pattern of the DIVIDE query
+ *
+ * @throws InvalidDivideQueryParserInputException if the stream windows list does not contain
+ * a stream window with a graph IRI that appears
+ * in the where clause graph items that make up
+ * the RSP-QL query body
+ */
+ RspQlQueryBody createRspQlQueryBody(QueryForm queryForm,
+ String queryOutput,
+ List<WhereClauseItem> whereClauseItems,
+ String solutionModifier,
+ List<StreamWindow> streamWindows,
+ DivideQueryParser parser)
+ throws InvalidDivideQueryParserInputException {
+ // create set of distinct stream graph names (IRIs) in the set of
+ // WHERE clause items
+ Set<String> inputStreamGraphs = new HashSet<>();
+ for (WhereClauseItem whereClauseItem : whereClauseItems) {
+ if (whereClauseItem.getItemType() == WhereClauseItemType.GRAPH) {
+ WhereClauseGraphItem graphItem =
+ (WhereClauseGraphItem) whereClauseItem;
+ inputStreamGraphs.add(graphItem.getGraph().getName());
+ }
+ }
+
+ // keep track of to which window number the different stream graph
+ // names are mapped
+ Map<String, Integer> streamGraphToWindowNumberMap = new HashMap<>();
+
+ // create FROM clauses
+ List<String> fromParts = new ArrayList<>();
+ int windowCounter = 0;
+ for (String inputStreamGraph : inputStreamGraphs) {
+ // filter list of input stream windows with the window that
+ // has the same IRI (name)
+ Optional<StreamWindow> matchingWindow = streamWindows
+ .stream()
+ .filter(streamWindow -> streamWindow.getStreamIri().equals(inputStreamGraph))
+ .findFirst();
+
+ // if such a window is not present, an exception should be thrown,
+ // because then there is no input about the window parameters for
+ // this IRI
+ if (!matchingWindow.isPresent()) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Window parameters of input stream '%s' are not " +
+ "specified in input", inputStreamGraph));
+ }
+
+ // otherwise, the FROM clause of this window can be generated and the
+ // window number is saved to the map
+ // -> first, the unbound variables still need to be replaced in window
+ String windowDefinition = matchingWindow.get().getWindowDefinition();
+ fromParts.add(String.format(RSP_QL_QUERY_BODY_FROM_TEMPLATE,
+ windowCounter,
+ inputStreamGraph,
+ windowDefinition));
+ streamGraphToWindowNumberMap.put(inputStreamGraph, windowCounter++);
+ }
+ String fromPart = String.join("\n", fromParts);
+
+ // construct WHERE clause
+ StringBuilder whereClause = new StringBuilder();
+ if (inputStreamGraphs.size() == 1) {
+ // if there is only 1 input stream graph, all expressions in the WHERE
+ // clause items can be grouped under that same graph
+ String graphName = "";
+ for (WhereClauseItem whereClauseItem : whereClauseItems) {
+ if (whereClauseItem.getItemType() == WhereClauseItemType.EXPRESSION) {
+ WhereClauseExpressionItem expressionItem =
+ (WhereClauseExpressionItem) whereClauseItem;
+ whereClause.append(expressionItem.getExpression()).append(" ");
+
+ } else if (whereClauseItem.getItemType() == WhereClauseItemType.GRAPH) {
+ WhereClauseGraphItem graphItem =
+ (WhereClauseGraphItem) whereClauseItem;
+ whereClause.append(graphItem.getGraph().getClause()).append(" ");
+ graphName = graphItem.getGraph().getName();
+
+ }
+ }
+
+ // this means 1 graph pattern is created on the window with the correct number
+ // -> this makes up the whole WHERE clause of the query
+ whereClause = new StringBuilder(String.format(RSP_QL_QUERY_BODY_WHERE_GRAPH_TEMPLATE,
+ streamGraphToWindowNumberMap.get(graphName),
+ whereClause));
+
+ } else {
+ // if there is more than 1 input stream graph, all expressions in the WHERE
+ // clause items (that were not grouped under a graph, i.e., of item type EXPRESSION)
+ // are also appended to the WHERE clause in the same way (i.e., not under a graph)
+ List<String> whereClauseParts = new ArrayList<>();
+ for (WhereClauseItem whereClauseItem : whereClauseItems) {
+ if (whereClauseItem.getItemType() == WhereClauseItemType.EXPRESSION) {
+ WhereClauseExpressionItem expressionItem =
+ (WhereClauseExpressionItem) whereClauseItem;
+ // so expression items are just added as such to the WHERE clause
+ whereClauseParts.add(expressionItem.getExpression());
+
+ } else if (whereClauseItem.getItemType() == WhereClauseItemType.GRAPH) {
+ WhereClauseGraphItem graphItem =
+ (WhereClauseGraphItem) whereClauseItem;
+ // graph items are added as a graph pattern on the window with the correct number
+ whereClauseParts.add(String.format(RSP_QL_QUERY_BODY_WHERE_GRAPH_TEMPLATE,
+ streamGraphToWindowNumberMap.get(graphItem.getGraph().getName()),
+ graphItem.getGraph().getClause()));
+
+ }
+ }
+ // in this case, the WHERE clause consists of the ordered string of all
+ // created individual parts
+ whereClause = new StringBuilder(String.join("\n", whereClauseParts));
+ }
+
+ // generate query body string
+ String queryFormString = queryForm == QueryForm.CONSTRUCT
+ ? String.format("{ %s }", queryOutput)
+ : (queryForm == QueryForm.ASK ? "" : queryOutput);
+ String queryBody = String.format(RSP_QL_QUERY_BODY_TEMPLATE,
+ queryForm.toString(),
+ queryFormString,
+ fromPart,
+ whereClause,
+ solutionModifier);
+
+ // collect all unbound variables in RSP-QL query body
+ // -> ignore from part when doing general search
+ // -> ignore unbound variables in stream windows since they will be used
+ // as window parameters instead of input variables
+ Set<String> unboundVariables = new HashSet<>(
+ parser.findUnboundVariables(String.format(RSP_QL_QUERY_BODY_TEMPLATE,
+ queryForm,
+ queryFormString,
+ "",
+ whereClause,
+ solutionModifier)));
+
+ return new RspQlQueryBody(queryBody, unboundVariables, queryForm,
+ queryFormString, whereClause.toString());
+ }
+
+ /**
+ * Solves any conflicts between the given set of DIVIDE prefixes and the set
+ * of prefixes used in the given templates.
+ *
+ * @param templates templates to be checked
+ * @param usedPrefixes set of prefixes that is used, with which no conflicts
+ * may occur
+ * @param dividePrefixes set of prefixes that will be used for the DIVIDE IRIs
+ * in the given template; this set will be modified if
+ * any prefix conflicts occur (conflicting prefixes are
+ * then replaced by the new unambiguous ones)
+ * @return modified templates where any prefix conflicts are resolved, i.e.,
+ * where conflicting prefixes are replaced by an unambiguous new one
+ */
+ private List<String> solveConflictsWithDividePrefixes(List<String> templates,
+ Set<Prefix> usedPrefixes,
+ Set<Prefix> dividePrefixes) {
+ for (Prefix prefix : usedPrefixes) {
+ if (DIVIDE_PREFIX_NAMES.contains(prefix.getName())) {
+ // retrieve prefix
+ Prefix conflictingPrefix = DIVIDE_PREFIX_MAP.get(prefix.getName());
+
+ // it is only a real conflict if the URI differs
+ if (!prefix.getUri().equals(conflictingPrefix.getUri())) {
+ // create new prefix
+ Prefix newPrefix = new Prefix(
+ String.format("divide-%s:", UUID.randomUUID()),
+ conflictingPrefix.getUri());
+
+ // update prefix set
+ dividePrefixes.remove(conflictingPrefix);
+ dividePrefixes.add(newPrefix);
+
+ // update prefix template
+ List<String> newTemplates = new ArrayList<>();
+ for (String template : templates) {
+ Pattern replacingPattern =
+ Pattern.compile("(\\s|\\(|^|\\^)" + conflictingPrefix.getName());
+ Matcher m = replacingPattern.matcher(template);
+ template = m.replaceAll("$1" + newPrefix.getName());
+ newTemplates.add(template);
+ }
+ templates = new ArrayList<>(newTemplates);
+ }
+ }
+ }
+ return templates;
+ }
+
+ String getTurtlePrefixList(Set<Prefix> prefixes) {
+ List<String> turtlePrefixList = new ArrayList<>();
+ for (Prefix prefix : prefixes) {
+ turtlePrefixList.add(convertPrefixToTurtlePrefix(prefix));
+ }
+ return String.join(" ", turtlePrefixList);
+ }
+
+ private String getShaclPrefixList(Set<Prefix> prefixes, String template) {
+ List<String> shaclPrefixList = new ArrayList<>();
+ for (Prefix prefix : prefixes) {
+ shaclPrefixList.add(convertPrefixToShaclPrefix(template, prefix));
+ }
+ return String.join("\n", shaclPrefixList);
+ }
+
+ private String convertPrefixToTurtlePrefix(Prefix prefix) {
+ return String.format(TURTLE_PREFIX_TEMPLATE, prefix.getName(), prefix.getUri());
+ }
+
+ private String convertPrefixToShaclPrefix(String template, Prefix prefix) {
+ return String.format(template,
+ COUNTER,
+ prefix.getName().substring(0, prefix.getName().length() - 1),
+ prefix.getUri().substring(1, prefix.getUri().length() - 1));
+ }
+
+}
diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParser.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParser.java
new file mode 100644
index 0000000..b88d254
--- /dev/null
+++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParser.java
@@ -0,0 +1,3317 @@
+package be.ugent.idlab.divide.core.query.parser;
+
+import be.ugent.idlab.divide.core.context.ContextEnrichingQuery;
+import be.ugent.idlab.divide.core.context.ContextEnrichment;
+import be.ugent.idlab.util.io.IOUtilities;
+import be.ugent.idlab.util.rdf.RDFLanguage;
+import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaUtilities;
+import org.apache.jena.atlas.lib.Pair;
+import org.apache.jena.query.Query;
+import org.apache.jena.query.QueryExecution;
+import org.apache.jena.query.QueryExecutionFactory;
+import org.apache.jena.query.QueryFactory;
+import org.apache.jena.query.QueryParseException;
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.rdf.model.ModelFactory;
+import org.apache.jena.sparql.syntax.ElementPathBlock;
+import org.apache.jena.sparql.syntax.ElementVisitorBase;
+import org.apache.jena.sparql.syntax.ElementWalker;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+public class DivideQueryParser implements IDivideQueryParser {
+
+ private static final boolean DEBUG = false;
+
+ private static int PREFIX_COUNTER = 0;
+
+ private static final Pattern PREFIX_PATTERN = Pattern.compile(
+ "(\\s*PREFIX\\s+(\\S+)\\s+(<[^<>]+>))", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern SPARQL_FROM_NAMED_GRAPH_PATTERN = Pattern.compile(
+ "\\s*FROM\\s+NAMED\\s+(\\S+)", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern SPARQL_FROM_DEFAULT_GRAPH_PATTERN = Pattern.compile(
+ "\\s*FROM\\s+(\\S+)", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern RSP_QL_FROM_NAMED_GRAPH_PATTERN = Pattern.compile(
+ "\\s*FROM\\s+NAMED\\s+GRAPH\\s+(\\S+)", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern RSP_QL_FROM_DEFAULT_GRAPH_PATTERN = Pattern.compile(
+ "\\s*FROM\\s+GRAPH\\s+(\\S+)", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern RSP_QL_FROM_NAMED_WINDOW_PATTERN = Pattern.compile(
+ "\\s*FROM\\s+NAMED\\s+WINDOW\\s+(\\S+)\\s+ON\\s+(\\S+)\\s+\\[([^\\[\\]]+)]",
+ Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern RSP_QL_WINDOW_PARAMETERS_PATTERN = Pattern.compile(
+ "\\s*((RANGE\\s+(\\S+))|(FROM\\s+NOW-(\\S+)\\s+TO\\s+NOW-(\\S+)))\\s+(TUMBLING|(STEP\\s+(\\S+)))",
+ Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern SPARQL_WHERE_CLAUSE_GRAPH_PATTERN = Pattern.compile(
+ "\\s*(GRAPH)\\s+(\\S+)\\s+\\{", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern RSP_QL_WHERE_CLAUSE_GRAPH_OR_WINDOW_PATTERN = Pattern.compile(
+ "\\s*(WINDOW|GRAPH)\\s+(\\S+)\\s+\\{", Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern SPARQL_QUERY_SPLIT_PATTERN = Pattern.compile(
+ "(" + PREFIX_PATTERN.pattern() + "*)" + // prefix group 1
+ ".+(CONSTRUCT|SELECT|ASK|DESCRIBE)((.(?!FROM))*)" + // form group 3, result group 4
+ "(\\s*(FROM.+)*)" + // from clauses group 8
+ "(WHERE\\s*\\{(.+)})" + // where clause group 11
+ "([^{}]*)", // remainder group 12
+ Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern SPECIAL_SPARQL_PATTERN =
+ Pattern.compile("(OPTIONAL|UNION|GRAPH|BIND|GROUP BY|HAVING|MINUS|FILTER)" +
+ "(.(?!(OPTIONAL|UNION|GRAPH|BIND|GROUP BY|HAVING|MINUS|FILTER)))+",
+ Pattern.CASE_INSENSITIVE);
+
+ private static final Pattern GROUP_BY_PATTERN =
+ Pattern.compile("GROUP\\s+BY\\s+(.(?!ORDER|LIMIT|OFFSET))+", Pattern.CASE_INSENSITIVE);
+
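+ // the patterns below mirror the equally named productions of the SPARQL 1.1
+ // grammar (PN_CHARS_BASE, PN_CHARS_U, PN_CHARS, PN_PREFIX, PNAME_NS, VARNAME,
+ // VAR1), so that prefixes and variables are matched following the SPARQL
+ // tokenization rules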
+ private static final Pattern PN_CHARS_BASE_PATTERN =
+ Pattern.compile("([A-Z]|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|" +
+ "[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|[\u2C00-\u2FEF]|" +
+ "[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|[\\x{10000}-\\x{EFFFF}])");
+ private static final Pattern PN_CHARS_U_PATTERN =
+ Pattern.compile(String.format("(%s)|_", PN_CHARS_BASE_PATTERN));
+ private static final Pattern PN_CHARS_PATTERN =
+ Pattern.compile(String.format(
+ "(%s)|-|[0-9]|\u00B7|[\u0300-\u036F]|[\u203F-\u2040]", PN_CHARS_U_PATTERN));
+ private static final Pattern PN_PREFIX_PATTERN =
+ Pattern.compile(String.format("(%s)(((%s)|'.')*(%s))?",
+ PN_CHARS_BASE_PATTERN, PN_CHARS_PATTERN, PN_CHARS_PATTERN));
+ private static final Pattern PN_NAME_NS_PATTERN =
+ Pattern.compile(String.format("(\\s|\\(|^|\\^)((%s)?:)", PN_PREFIX_PATTERN));
+ private static final Pattern VARNAME_PATTERN =
+ Pattern.compile(String.format(
+ "((%s)|[0-9])((%s)|[0-9]|\u00B7|[\u0300-\u036F]|[\u203F-\u2040])*",
+ PN_CHARS_U_PATTERN, PN_CHARS_U_PATTERN));
+ private static final Pattern VAR1_PATTERN =
+ Pattern.compile(String.format("\\?(%s)", VARNAME_PATTERN));
+
+ private static final Pattern USED_PREFIX_PATTERN = PN_NAME_NS_PATTERN;
+ private static final Pattern UNBOUND_VARIABLES_PATTERN = VAR1_PATTERN;
+ private static final Pattern UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN =
+ Pattern.compile(String.format("\\?\\{(%s)}", VARNAME_PATTERN));
+
+ private static final Pattern STREAM_WINDOW_PARAMETER_VARIABLE_PATTERN =
+ Pattern.compile(String.format("((%s)|(PT(%s)([SMH])))",
+ UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN,
+ UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN));
+ private static final Pattern STREAM_WINDOW_PARAMETER_NUMBER_PATTERN =
+ Pattern.compile("(PT([0-9]+)([SMH]))");
+
+ private static final Pattern SELECT_CLAUSE_EXPRESSION_PATTERN =
+ Pattern.compile(String.format("\\(\\s*(\\S+)\\s+AS\\s+(%s)\\s*\\)", VAR1_PATTERN));
+ private static final Pattern SELECT_CLAUSE_PATTERN_ENTRY =
+ Pattern.compile(String.format("((%s)|(%s))\\s+",
+ SELECT_CLAUSE_EXPRESSION_PATTERN, VAR1_PATTERN));
+ private static final Pattern SELECT_CLAUSE_PATTERN =
+ Pattern.compile(String.format("(%s)+", SELECT_CLAUSE_PATTERN_ENTRY));
+
+ private static final List<String> POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS = new ArrayList<>();
+
+ static {
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("optional");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("union");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("graph");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("bind");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("group by");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("having");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("minus");
+ POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("filter");
+ }
+
+ private final DivideQueryGenerator divideQueryGenerator;
+ private final boolean processUnmappedVariableMatches;
+ private final boolean validateUnboundVariablesInRspQlQueryBody;
+
+ DivideQueryParser(boolean processUnmappedVariableMatches,
+ boolean validateUnboundVariablesInRspQlQueryBody) {
+ this.divideQueryGenerator = new DivideQueryGenerator();
+
+ this.processUnmappedVariableMatches = processUnmappedVariableMatches;
+ this.validateUnboundVariablesInRspQlQueryBody = validateUnboundVariablesInRspQlQueryBody;
+
+ // initialize Jena
+ org.apache.jena.query.ARQ.init();
+ }
+
+ DivideQueryParser() {
+ this(true, true);
+ }
+
+ @Override
+ public void validateDivideQueryContextEnrichment(ContextEnrichment contextEnrichment)
+ throws InvalidDivideQueryParserInputException {
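+ // (a hypothetical context-enriching query that passes all checks below:
+ // "PREFIX ex: <http://example.org/>
+ // CONSTRUCT { ?p a ex:MonitoredPatient } WHERE { ?p a ex:Patient }")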
+ // check all context-enriching queries
+ for (ContextEnrichingQuery query : contextEnrichment.getQueries()) {
+ // split query
+ SplitSparqlQuery splitSparqlQuery = splitSparqlQuery(" " + query.getQuery());
+
+ // ensure query is of CONSTRUCT form
+ if (splitSparqlQuery.getQueryForm() != QueryForm.CONSTRUCT) {
+ throw new InvalidDivideQueryParserInputException(
+ "Context-enriching query should be of CONSTRUCT form");
+ }
+
+ // ensure query does not contain any FROM clauses
+ if (splitSparqlQuery.getFromPart() != null &&
+ !splitSparqlQuery.getFromPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Context-enriching query should not contain any FROM clauses");
+ }
+
+ // ensure query does not contain any final part (solution modifiers)
+ if (splitSparqlQuery.getFinalPart() != null &&
+ !splitSparqlQuery.getFinalPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Context-enriching query should not contain any solution modifiers");
+ }
+
+ // ensure query is valid SPARQL
+ try (QueryExecution queryExecution = QueryExecutionFactory.create(
+ query.getQuery(), ModelFactory.createDefaultModel())) {
+ queryExecution.execConstruct();
+ } catch (Exception e) {
+ throw new InvalidDivideQueryParserInputException(
+ "Context-enriching query should be valid SPARQL");
+ }
+ }
+ }
+
+ @Override
+ public DivideQueryParserOutput parseDivideQuery(DivideQueryParserInput input)
+ throws InvalidDivideQueryParserInputException {
+ // make sure the input is validated & preprocessed
+ // (because the remainder of the parsing assumes valid & preprocessed input)
+ input.validate();
+ input.preprocess();
+
+ // process variable mapping between stream & final query if relevant
+ MappedDivideQueryParserInput mappedInput =
+ processStreamToFinalQueryVariableMappings(input);
+
+ // clean input: replace overlapping variables with new non-overlapping ones
+ CleanDivideQueryParserInput cleanInput =
+ cleanInputFromOverlappingVariables(mappedInput);
+
+ DivideQueryParserOutput result;
+ if (input.getInputQueryLanguage() == InputQueryLanguage.SPARQL) {
+ result = parseDivideQueryFromSparqlQueries(cleanInput);
+ } else if (input.getInputQueryLanguage() == InputQueryLanguage.RSP_QL) {
+ result = parseDivideQueryFromRspQlQuery(cleanInput);
+ } else {
+ // should not be possible
+ throw new InvalidDivideQueryParserInputException(
+ "Invalid input query language");
+ }
+
+ // process output again, based on variable mapping
+ result = restoreOriginalVariablesInOutput(result, cleanInput.getVariableMapping());
+
+ // increase the counter of the generator, which is used to create unique
+ // pattern and prefix IRIs
+ DivideQueryGenerator.COUNTER++;
+ return result;
+ }
+
+ private MappedDivideQueryParserInput processStreamToFinalQueryVariableMappings(
+ DivideQueryParserInput input) throws InvalidDivideQueryParserInputException {
+ // check if mappings should be analyzed: this is the case for SPARQL query
+ // input where a final query is present
+ // NOTE: analyzing is also required with an empty mapping, to check all variable
+ // matches that are not defined in the mapping
+ boolean mappingAnalysisRequired =
+ input.getInputQueryLanguage() == InputQueryLanguage.SPARQL &&
+ input.getFinalQuery() != null && !input.getFinalQuery().trim().isEmpty();
+
+ // if no mapping analysis is required, we can continue with the original input
+ if (!mappingAnalysisRequired) {
+ return new MappedDivideQueryParserInput(input);
+ }
+
+ print("PROCESSING STREAM TO FINAL QUERY VARIABLE MAPPINGS");
+
+ // validate final query
+ String finalQuery = input.getFinalQuery();
+ validateSparqlQuery(finalQuery, "Final");
+
+ // split final query to be used further on
+ SplitSparqlQuery splitFinalQuery = splitSparqlQuery(finalQuery);
+
+ // retrieve mapping
+ Map<String, String> mapping = input.getStreamToFinalQueryVariableMapping();
+
+ // further check mapping in case of ASK query
+ // -> for ASK queries, the result part is empty, so there is no part of
+ // the final query that will end up in the RSP-QL query body
+ // -> no mapping should be done
+ if (splitFinalQuery.getQueryForm() == QueryForm.ASK) {
+ // so in case the mapping is empty, we can continue with the original input
+ // -> if not, this is an indication of wrong input
+ if (mapping.isEmpty()) {
+ return new MappedDivideQueryParserInput(input);
+ } else {
+ throw new InvalidDivideQueryParserInputException(
+ "No stream to final query variable mapping should be provided " +
+ "if the final query is an ASK query.");
+ }
+ }
+
+ // IF THIS POINT IS REACHED, A VARIABLE MATCH & MAPPING CHECK SHOULD BE DONE
+ // -> based on the mappings, the stream and final query should both be analyzed
+ // -> if adaptations to variable names are required, only the final query will
+ // be updated
+ // BUT: what about variables occurring in other input parts?
+ // -> solution modifier: this is used in the final RSP-QL query, of which the
+ // WHERE clause is fully extracted from the stream query
+ // -> stream windows: variables occurring in the stream windows should always be
+ // replaced as window parameter during the query derivation, so
+ // they should either occur in the stream part of the stream
+ // query, or they are just put there to allow replacement of the
+ // default window parameter value via context-enriching queries
+ // -> intermediate queries: they are used separately as extra rules in addition
+ // to the sensor query rule, but are not used in the sensor
+ // query rule itself, so no matching of them is required
+ // CONCLUSION: if no updates are made to the variables as how they occur in the
+ // stream query, then no updates are required to the variables occurring
+ // in the solution modifier, stream windows & intermediate queries
+ // => to align all matches and remove identical variable names for non-matches,
+ // it suffices to only make updates to variable names in final query
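+ // EXAMPLE: if the stream query binds ?p and the final query uses ?person,
+ // with mapping {?p -> ?person}, only the final query is rewritten
+ // (?person becomes ?p); the stream query, solution modifier, stream
+ // windows & intermediate queries stay untouched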
+
+ // extract all variables occurring in stream query and final query
+ List<String> streamQueryVariables = findUnboundVariables(input.getStreamQuery());
+ List<String> finalQueryVariables = findUnboundVariables(input.getFinalQuery());
+
+ // check if all variable mappings are valid, i.e., whether all keys are variable
+ // names in the stream query, and all values are variable names in the final query
+ if (!streamQueryVariables.containsAll(mapping.keySet())) {
+ throw new InvalidDivideQueryParserInputException(
+ "Stream to final query variable mapping contains variable " +
+ "names that do not occur in stream query");
+ }
+ if (!finalQueryVariables.containsAll(mapping.values())) {
+ throw new InvalidDivideQueryParserInputException(
+ "Stream to final query variable mapping contains variable " +
+ "names that do not occur in final query");
+ }
+
+ // check that the mapping contains no conflicts, i.e., no duplicate target variables
+ Set<String> mappingValues = new HashSet<>();
+ for (String s : mapping.keySet()) {
+ if (mappingValues.contains(mapping.get(s))) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Stream to final query variable mapping contains " +
+ "duplicate mapping to variable '%s'", mapping.get(s)));
+ }
+ mappingValues.add(mapping.get(s));
+ }
+
+ // create reverse mapping to know the required replacements from the point of
+ // view of the final query
+ Map<String, String> reverseMapping = new HashMap<>();
+ for (String s : mapping.keySet()) {
+ reverseMapping.put(mapping.get(s), s);
+ }
+
+ // keep track of list of required variable replacements in final query
+ Map<String, String> requiredReplacements = new HashMap<>();
+
+ // create set of all possible conflicting variables to ensure that
+ // no conflicts are created with the newest variables
+ // -> this set should of course contain all final query variables
+ // (both old and new)
+ // -> but also all stream query variables, to avoid a potential replacement
+ // of a variable in the final query to an already existing variable in the
+ // stream query
+ // (note that conflicts are very unlikely because new random variable names are
+ // obtained from a random UUID generator, but it is still better to be safe)
+ Set<String> conflictingVariables = new HashSet<>(finalQueryVariables);
+ conflictingVariables.addAll(streamQueryVariables);
+
+ // loop over all variables occurring in the final query
+ // -> a replacement entry should be created for ALL variables
+ // (also the ones that should not be actually replaced: for them,
+ // a replacement to themselves should be created)
+ for (String finalQueryVariable : finalQueryVariables) {
+ if (reverseMapping.containsKey(finalQueryVariable)) {
+ // if the variable has a defined mapping, the required replacement in
+ // the final query is obvious
+ requiredReplacements.put(
+ finalQueryVariable, reverseMapping.get(finalQueryVariable));
+ conflictingVariables.add(reverseMapping.get(finalQueryVariable));
+ print("Add defined mapping: " + finalQueryVariable +
+ " to " + reverseMapping.get(finalQueryVariable));
+
+ } else if (streamQueryVariables.contains(finalQueryVariable) &&
+ (mapping.containsKey(finalQueryVariable) || !processUnmappedVariableMatches)) {
+ // if the final query variable also occurs in the stream query, and there
+ // is no specifically defined variable in the stream query to which this
+ // matches, then it depends on 2 things to decide whether this variable
+ // should be replaced:
+ // 1. if the variable also occurs as a key of the mapping, then it should be
+ // replaced by a random new variable, because there will be another final
+ // variable that is replaced by this variable
+ // 2. if not, then the variable does not occur in the mapping at all (not in
+ // the key set since condition 1 above is not fulfilled, and not in the
+ // value set since the reverse mapping's key set does not contain it)
+ // -> then it depends on how unmapped matches are handled: if unmapped
+ // variable matches should not be processed, they cannot be considered
+ // a match, even though their names happen to be identical
+ // -> then a replacement is also required
+ // (otherwise, they can be considered a match, and this means they
+ // can be left unchanged)
+
+ // -> the final query variable should be replaced to a new variable
+ // that is not occurring in the stream query, and that is also not
+ // yet occurring in the final query
+ boolean variableAccepted = false;
+ while (!variableAccepted) {
+ String triedNewVariable = generateRandomUnboundVariable();
+ // the new variable may not equal any conflicting variable,
+ // nor occur as a substring of one
+ variableAccepted = conflictingVariables
+ .stream()
+ .noneMatch(s -> s.equals(triedNewVariable) ||
+ s.contains(triedNewVariable));
+ if (variableAccepted) {
+ requiredReplacements.put(
+ finalQueryVariable, triedNewVariable);
+ conflictingVariables.add(triedNewVariable);
+ print("Add additional mapping: " +
+ finalQueryVariable + " to " + triedNewVariable);
+ }
+ }
+
+ } else {
+ // if it's a variable that is not occurring in the stream query, and also not
+ // a variable that should be mapped, then it can be left as is
+ // -> a replacement to itself should then be created
+ requiredReplacements.put(finalQueryVariable, finalQueryVariable);
+ }
+ }
+
+ // split the replacement list in two and first perform temporary replacements
+ // -> these temporary replacements are done before the actual replacements
+ // -> this avoids conflicts with cross-referenced mappings, e.g.,
+ // where ?a is mapped to ?b and ?b is mapped to ?a
+ // -> this works if the resulting variables after replacement are unique, i.e.,
+ // they do not occur as such in the list of variables or as a substring of
+ // any of these variables
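+ // EXAMPLE: for the cross-referenced mapping {?a -> ?b, ?b -> ?a}, replacing
+ // directly would collapse both variables into one; instead, ?a and ?b
+ // are first replaced by fresh temporary variables, which are in turn
+ // replaced by ?b and ?a, respectively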
+ Map<String, String> temporaryReplacements = new HashMap<>();
+ Map<String, String> finalReplacements = new HashMap<>();
+ for (Map.Entry<String, String> requiredReplacement : requiredReplacements.entrySet()) {
+ String temporaryVariable = "";
+ boolean variableAccepted = false;
+ while (!variableAccepted) {
+ String triedNewVariable = generateRandomUnboundVariable();
+ // the new variable may not equal any conflicting variable,
+ // nor occur as a substring of one
+ variableAccepted = conflictingVariables
+ .stream()
+ .noneMatch(s -> s.equals(triedNewVariable) ||
+ s.contains(triedNewVariable));
+ if (variableAccepted) {
+ temporaryVariable = triedNewVariable;
+ conflictingVariables.add(triedNewVariable);
+ }
+ }
+
+ // split up replacements
+ temporaryReplacements.put(
+ requiredReplacement.getKey(), temporaryVariable);
+ finalReplacements.put(
+ temporaryVariable, requiredReplacement.getValue());
+ }
+
+ print("Temporal replacements: " + temporalReplacements);
+ print("Final replacements: " + finalReplacements);
+
+ // first perform the temporary replacements
+ // (keys are sorted so that variables containing other variables as a
+ // substring are replaced first)
+ List<String> sortedTemporaryReplacementKeys = temporaryReplacements.keySet()
+ .stream()
+ .sorted((s1, s2) -> s1.contains(s2) ?
+ (s1.equals(s2) ? 0 : -1) :
+ (s2.contains(s1) ? 1 : s1.compareTo(s2)))
+ .collect(Collectors.toList());
+ print("Order of temporary replacements: " + sortedTemporaryReplacementKeys);
+ for (String key : sortedTemporaryReplacementKeys) {
+ finalQuery = finalQuery.replaceAll(
+ Pattern.quote(key), temporaryReplacements.get(key));
+ }
+ print("Final query after temporary replacements: " + finalQuery);
+
+ // then also perform the final replacements
+ List<String> sortedFinalReplacementKeys = finalReplacements.keySet()
+ .stream()
+ .sorted((s1, s2) -> s1.contains(s2) ?
+ (s1.equals(s2) ? 0 : -1) :
+ (s2.contains(s1) ? 1 : s1.compareTo(s2)))
+ .collect(Collectors.toList());
+ print("Order of final replacements: " + sortedFinalReplacementKeys);
+ for (String key : sortedFinalReplacementKeys) {
+ finalQuery = finalQuery.replaceAll(
+ Pattern.quote(key), finalReplacements.get(key));
+ }
+ print("Final query after final replacements: " + finalQuery);
+ print("======================================");
+
+ return new MappedDivideQueryParserInput(
+ input.getInputQueryLanguage(),
+ input.getStreamWindows(),
+ input.getStreamQuery(),
+ input.getIntermediateQueries(),
+ finalQuery,
+ input.getSolutionModifier(),
+ requiredReplacements);
+ }
+
+ private DivideQueryParserOutput parseDivideQueryFromSparqlQueries(CleanDivideQueryParserInput input)
+ throws InvalidDivideQueryParserInputException {
+ // validate stream query
+ validateSparqlQuery(input.getStreamQuery(), "Stream");
+
+ // parse stream query
+ ParsedSparqlQuery parsedStreamQuery = parseSparqlQuery(input.getStreamQuery());
+
+ // if final query of input is not present, and query form of stream query
+ // is not CONSTRUCT, a new input should be constructed in order to properly
+ // deal with this!
+ if (input.getFinalQuery() == null &&
+ parsedStreamQuery.getSplitSparqlQuery().getQueryForm() != QueryForm.CONSTRUCT) {
+ String constructTemplate;
+ String newStreamQuery;
+ String newFinalQuery;
+
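+ // EXAMPLE: a stream query "SELECT ?x WHERE { ... }" without a final query is
+ // rewritten into a CONSTRUCT stream query whose template contains a
+ // random triple "?x <urn:uuid:...> <urn:uuid:...> ." plus a final
+ // SELECT query with that same triple pattern in its WHERE clause
+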
+ if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.SELECT) {
+ // in case of a SELECT query, all variables occurring in the
+ // SELECT clause should be transformed to a CONSTRUCT template
+ // -> first parse SELECT clause
+ List<String> selectVariables = parseSelectClause(
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart());
+
+ // only retain the plain variables, excluding "(... AS ?...)" expressions
+ // -> only those should be mapped to the CONSTRUCT template
+ List<String> actualSelectVariables = selectVariables
+ .stream()
+ .filter(s -> UNBOUND_VARIABLES_PATTERN.matcher(s).matches())
+ .collect(Collectors.toList());
+
+ // create CONSTRUCT template with random triple for each variable
+ constructTemplate = actualSelectVariables
+ .stream()
+ .map(s -> String.format("%s <urn:uuid:%s> <urn:uuid:%s> .",
+ s, UUID.randomUUID(), UUID.randomUUID()))
+ .collect(Collectors.joining(" "));
+
+ // create updated final SELECT query based on CONSTRUCT template and original input
+ newFinalQuery = String.format("%s SELECT %s WHERE { %s }",
+ parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
+ constructTemplate).trim();
+
+ } else if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.DESCRIBE) {
+ // in case of a DESCRIBE query, all variables occurring in the
+ // DESCRIBE clause should be transformed to a CONSTRUCT template
+ // -> first parse DESCRIBE clause
+ List<String> describeVariables = new ArrayList<>();
+ Matcher m = UNBOUND_VARIABLES_PATTERN.matcher(
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart());
+ while (m.find()) {
+ describeVariables.add(m.group());
+ }
+
+ // create CONSTRUCT template with random triple for each variable
+ constructTemplate = describeVariables
+ .stream()
+ .map(s -> String.format("%s <urn:uuid:%s> <urn:uuid:%s> .",
+ s, UUID.randomUUID(), UUID.randomUUID()))
+ .collect(Collectors.joining(" "));
+
+ // create updated final DESCRIBE query based on CONSTRUCT template and original input
+ newFinalQuery = String.format("%s DESCRIBE %s WHERE { %s }",
+ parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
+ constructTemplate).trim();
+
+ } else { // QueryForm.ASK
+ // in case of an ASK query, no variables occur in the result part
+ // -> a random triple should be generated to link both queries
+ constructTemplate = String.format(
+ "<urn:uuid:%s> <urn:uuid:%s> <urn:uuid:%s> .",
+ UUID.randomUUID(), UUID.randomUUID(),
+ UUID.randomUUID());
+
+ // create updated final ASK query based on CONSTRUCT template and original input
+ newFinalQuery = String.format("%s ASK WHERE { %s }",
+ parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
+ constructTemplate).trim();
+ }
+
+ // create updated stream query based on CONSTRUCT template and original input
+ newStreamQuery = String.format("%s\nCONSTRUCT\n{\n%s\n}\n%s\nWHERE {\n%s\n} %s",
+ parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
+ constructTemplate,
+ parsedStreamQuery.getSplitSparqlQuery().getFromPart(),
+ parsedStreamQuery.getSplitSparqlQuery().getWherePart(),
+ parsedStreamQuery.getSplitSparqlQuery().getFinalPart()).trim();
+
+ // create new parser input based on new stream & final queries, and copy other entries
+ CleanDivideQueryParserInput newInput = new CleanDivideQueryParserInput(
+ input.getInputQueryLanguage(),
+ input.getStreamWindows(),
+ newStreamQuery,
+ new ArrayList<>(),
+ newFinalQuery,
+ input.getSolutionModifier(),
+ input.getVariableMapping());
+ newInput.setUnboundVariables(input.getUnboundVariables());
+ newInput.setFinalQueryVariableMapping(input.getFinalQueryVariableMapping());
+ newInput.preprocess();
+
+ // perform the parsing again for this adapted input
+ return parseDivideQueryFromSparqlQueries(newInput);
+ }
+
+ // check if stream query has no final part
+ if (parsedStreamQuery.getSplitSparqlQuery().getFinalPart() != null &&
+ !parsedStreamQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Input queries cannot contain any solution modifiers, since this" +
+ " cannot be preserved by DIVIDE (because individual" +
+ " instantiated queries are generated). Any solution modifier" +
+ " for the queries derived by DIVIDE can be defined as a" +
+ " separate input entry.");
+ }
+
+ // validate stream window definitions
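+ // (valid window definitions are, e.g., "RANGE PT30S STEP PT5S" and
+ // "FROM NOW-PT1M TO NOW-PT0S TUMBLING")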
+ for (StreamWindow streamWindow : input.getStreamWindows()) {
+ Matcher m = RSP_QL_WINDOW_PARAMETERS_PATTERN.matcher(
+ streamWindow.getWindowDefinition());
+ if (!m.matches()) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Stream window with name '%s' contains invalid" +
+ " RSP-QL window definition", streamWindow.getStreamIri()));
+ }
+ }
+
+ // retrieve the graph names used in the FROM clauses of this SPARQL query
+ Pair<List<String>, String> inputGraphNamesResult = retrieveGraphNamesFromSparqlFromPart(
+ parsedStreamQuery.getSplitSparqlQuery().getFromPart(),
+ parsedStreamQuery.getPrefixes());
+ List<String> inputGraphNames = inputGraphNamesResult.getLeft();
+
+ // parse remainder of FROM clause: it can only contain default graph patterns
+ String fromPartLeftover = inputGraphNamesResult.getRight();
+ Matcher m = SPARQL_FROM_DEFAULT_GRAPH_PATTERN.matcher(fromPartLeftover);
+ while (m.find()) {
+ fromPartLeftover = fromPartLeftover.replace(m.group().trim(), "").trim();
+ }
+ if (!fromPartLeftover.trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("SPARQL query contains invalid part '%s'", fromPartLeftover));
+ }
+
+ // parse the WHERE clause based on the used prefixes & defined input graph names
+ WhereClause streamQueryWhereClause = parseWhereClauseOfQuery(
+ parsedStreamQuery.getSplitSparqlQuery().getWherePart(),
+ parsedStreamQuery.getPrefixes(),
+ inputGraphNames,
+ InputQueryLanguage.SPARQL);
+
+ // parse where clause of stream query
+ ParsedStreamQueryWhereClause parsedStreamQueryWhereClause =
+ parseStreamQueryWhereClauseOfQuery(
+ streamQueryWhereClause,
+ input.getStreamWindows()
+ .stream()
+ .map(StreamWindow::getStreamIri)
+ .collect(Collectors.toList()));
+
+ // validate parsed where clause of stream query: there should be at least
+ // 1 graph on a stream IRI (otherwise there is no point in constructing
+ // RSP queries with DIVIDE)
+ if (parsedStreamQueryWhereClause.getStreamItems()
+ .stream()
+ .noneMatch(whereClauseItem
+ -> whereClauseItem.getItemType() == WhereClauseItemType.GRAPH)) {
+ throw new InvalidDivideQueryParserInputException(
+ "Stream query should at least contain 1 graph on stream IRI in WHERE clause");
+ }
+
+ // validate defined solution modifier as valid SPARQL
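+ // (e.g., for the modifier "GROUP BY ?room ORDER BY ?room", validity is tested
+ // on the dummy query
+ // "SELECT ?room WHERE { ?room ?a ?b . } GROUP BY ?room ORDER BY ?room")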
+ List<String> solutionModifierVariables = new ArrayList<>();
+ if (!input.getSolutionModifier().trim().isEmpty()) {
+ solutionModifierVariables.addAll(
+ findUnboundVariables(input.getSolutionModifier()));
+ try {
+ List<String> selectVariables = new ArrayList<>();
+ List<String> whereClauseVariables = new ArrayList<>();
+ Matcher solutionModifierMatcher =
+ GROUP_BY_PATTERN.matcher(input.getSolutionModifier());
+ if (solutionModifierMatcher.find()) {
+ selectVariables.addAll(findUnboundVariables(solutionModifierMatcher.group()));
+ whereClauseVariables.addAll(solutionModifierVariables);
+ } else {
+ if (solutionModifierVariables.isEmpty()) {
+ selectVariables.add("?x");
+ } else {
+ selectVariables.addAll(solutionModifierVariables);
+ }
+ whereClauseVariables.addAll(selectVariables);
+ }
+ String testQuery = String.format("SELECT %s WHERE { %s } %s",
+ String.join(" ", selectVariables),
+ whereClauseVariables.stream().map(s -> s + " ?a ?b . ").
+ collect(Collectors.joining(" ")),
+ input.getSolutionModifier());
+ QueryFactory.create(testQuery);
+ } catch (QueryParseException e) {
+ throw new InvalidDivideQueryParserInputException(
+ "Defined solution modifier is no valid SPARQL");
+ }
+ }
+
+ // validate variables used in stream window definitions
+ // -> first parse them to check if they should be mapped to a new variable
+ // based on the preprocessing
+ // -> then check whether the antecedent of the sensor query rule will contain
+ // this variable, OR whether a default value is specified for it in the config
+ List<ParsedStreamWindow> parsedStreamWindows = new ArrayList<>();
+ for (StreamWindow streamWindow : input.getStreamWindows()) {
+ ParsedStreamWindow parsedStreamWindow =
+ parseStreamWindow(streamWindow, input.getVariableMapping());
+
+ List<String> unboundVariablesInContext =
+ findUnboundVariables(parsedStreamQueryWhereClause.getContextPart());
+ for (String unboundVariable : parsedStreamWindow.getUnboundVariables()) {
+ if (parsedStreamWindow.getDefaultWindowParameterValues().containsKey(unboundVariable)) {
+ if (unboundVariablesInContext.contains(unboundVariable)) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Variables defined in the stream window parameters should either occur " +
+ "in the context part of the stream query (in order to be able " +
+ "to be substituted during the query derivation), OR a default " +
+ "value for this variable should be specified in the " +
+ "configuration. For variable %s, the first condition is " +
+ "fulfilled, so a default value cannot be specified in the " +
+ "configuration.", input.getReverseVariableMapping().getOrDefault(
+ unboundVariable, unboundVariable)));
+ }
+ } else {
+ if (!unboundVariablesInContext.contains(unboundVariable)) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Variables defined in the stream window parameters should either occur " +
+ "in the context part of the stream query (in order to be able " +
+ "to be substituted during the query derivation), OR a default " +
+ "value for this variable should be specified in the " +
+ "configuration. For variable %s, the first condition is not " +
+ "fulfilled, so a default value should be specified in the " +
+ "configuration.", input.getReverseVariableMapping().getOrDefault(
+ unboundVariable, unboundVariable)));
+ }
+ }
+ }
+
+ parsedStreamWindows.add(parsedStreamWindow);
+ }
+
+ // declare variables which need to be initialized differently
+ // based on the queries in the parser input
+ String resultingQueryOutput;
+ QueryForm resultingQueryForm;
+ String goal;
+ List<ParsedSparqlQuery> intermediateQueries = new ArrayList<>();
+ Set<Prefix> queryPatternPrefixes;
+ Set<Prefix> sensorQueryRulePrefixes;
+
+ // if no final query is present, the streaming query is the only input
+ // (there can also be no intermediate queries without a final query)
+ if (input.getFinalQuery() == null) {
+ // we already know this is a CONSTRUCT query; otherwise, it would have
+ // been transformed into a new input above
+
+ // in that case, the original output of the streaming query is also
+ // the output of the RSP-QL query generated with DIVIDE
+ // (and similarly for the form of this query)
+ resultingQueryOutput = parsedStreamQuery.getSplitSparqlQuery().getResultPart();
+ resultingQueryForm = parsedStreamQuery.getSplitSparqlQuery().getQueryForm();
+
+ // in this case, the query pattern prefixes can simply be the prefixes used
+ // in the streaming query & sensor query rule
+ queryPatternPrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
+ sensorQueryRulePrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
+
+ // in this case, the reasoner goal for DIVIDE is simply this query output
+ // in both antecedent & consequence
+ goal = divideQueryGenerator.createGoal(
+ parsedStreamQuery.getPrefixes(),
+ resultingQueryOutput,
+ resultingQueryOutput);
+
+ } else {
+ // if a final query is present, it should be ensured that the stream query
+ // is of CONSTRUCT form (only the final query can have another form)
+ if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() != QueryForm.CONSTRUCT) {
+ throw new InvalidDivideQueryParserInputException(
+ "Stream query should be a CONSTRUCT query if another " +
+ "final query is specified");
+ }
+
+ // parse final query
+ ParsedSparqlQuery parsedFinalQuery = parseSparqlQuery(input.getFinalQuery());
+
+ // check if WHERE clause exists
+ if (parsedFinalQuery.getSplitSparqlQuery().getWherePart() == null ||
+ parsedFinalQuery.getSplitSparqlQuery().getWherePart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Final query of %s form should have a non-empty " +
+ "WHERE clause.%s", parsedFinalQuery.getSplitSparqlQuery().getQueryForm(),
+ parsedFinalQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.ASK ?
+ " For a final ASK query, the WHERE keyword should be " +
+ "explicitly mentioned." : ""));
+ }
+
+ // check if result part is empty for ASK queries (= required!)
+ if (parsedFinalQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.ASK &&
+ parsedFinalQuery.getSplitSparqlQuery().getResultPart() != null &&
+ !parsedFinalQuery.getSplitSparqlQuery().getResultPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Final query of ASK form should fulfill regex 'ASK (FROM .*)* WHERE {.*}'.");
+ }
+
+ // the final query may not contain any FROM definitions
+ if (parsedFinalQuery.getSplitSparqlQuery().getFromPart() != null &&
+ !parsedFinalQuery.getSplitSparqlQuery().getFromPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Final query cannot contain any FROM parts");
+ }
+
+ // check if final query has no final part
+ if (parsedFinalQuery.getSplitSparqlQuery().getFinalPart() != null &&
+ !parsedFinalQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Input queries cannot contain any solution modifiers, since this" +
+ " cannot be preserved by DIVIDE (because individual" +
+ " instantiated queries are generated). Any solution modifier" +
+ " for the queries derived by DIVIDE can be defined as a" +
+ " separate input entry.");
+ }
+
+ // ensure no conflicts exist between parsed final query & prefixes in stream query
+ parsedFinalQuery = solvePrefixConflicts(
+ parsedStreamQuery.getPrefixes(), parsedFinalQuery);
+
+ // in this case, the original output of the final query is also
+ // the output of the RSP-QL query generated with DIVIDE
+ // (and similarly for the form of this query)
+ resultingQueryOutput = parsedFinalQuery.getSplitSparqlQuery().getResultPart();
+ resultingQueryForm = parsedFinalQuery.getSplitSparqlQuery().getQueryForm();
+
+ // in this case, the prefixes of both the stream & final query need
+ // to be merged to be used for the query pattern
+ queryPatternPrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
+ queryPatternPrefixes.addAll(parsedFinalQuery.getPrefixes());
+
+ // the sensor query rule prefixes set should only contain the prefixes
+ // of the stream query (potentially added later with the prefixes of
+ // any intermediate queries)
+ sensorQueryRulePrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
+
+ // in this case, the goal is constructed differently based on the query form:
+ if (resultingQueryForm == QueryForm.CONSTRUCT) {
+ // in case of a CONSTRUCT query, the goal takes the WHERE clause of
+ // the final query as antecedent, and the output of the CONSTRUCT
+ // query (i.e., the CONSTRUCT clause) as consequence
+ goal = divideQueryGenerator.createGoal(
+ parsedFinalQuery.getPrefixes(),
+ parsedFinalQuery.getSplitSparqlQuery().getWherePart(),
+ resultingQueryOutput);
+
+ } else { // QueryForm.ASK, QueryForm.DESCRIBE or QueryForm.SELECT
+ // in case of a SELECT, ASK or DESCRIBE query, both the antecedent and
+ // the consequence of the goal are set to the WHERE clause of the final query
+ goal = divideQueryGenerator.createGoal(
+ parsedFinalQuery.getPrefixes(),
+ parsedFinalQuery.getSplitSparqlQuery().getWherePart(),
+ parsedFinalQuery.getSplitSparqlQuery().getWherePart());
+ }
+
+ // if intermediate queries are provided, they should be parsed and
+ // added to the inputs for the creation of the sensor query rule
+ if (!input.getIntermediateQueries().isEmpty()) {
+ for (String intermediateQueryString : input.getIntermediateQueries()) {
+ // split intermediate query
+ ParsedSparqlQuery parsedIntermediateQuery =
+ parseSparqlQuery(intermediateQueryString);
+
+ // check if intermediate query has no final part
+ if (parsedIntermediateQuery.getSplitSparqlQuery().getFinalPart() != null &&
+ !parsedIntermediateQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Input queries cannot contain any solution modifiers, since this" +
+ " cannot be preserved by DIVIDE (because individual" +
+ " instantiated queries are generated). Any solution modifier" +
+ " for the queries derived by DIVIDE can be defined as a" +
+ " separate input entry.");
+ }
+
+ // ensure no conflicts exist between parsed intermediate query & prefixes
+ // in stream & final query
+ parsedIntermediateQuery = solvePrefixConflicts(
+ queryPatternPrefixes, parsedIntermediateQuery);
+
+ // add prefixes to prefixes used for sensor query rule
+ sensorQueryRulePrefixes.addAll(parsedIntermediateQuery.getPrefixes());
+
+ // ensure that intermediate query is of CONSTRUCT form (only the final query
+ // can have another form)
+ if (parsedIntermediateQuery.getSplitSparqlQuery().getQueryForm()
+ != QueryForm.CONSTRUCT) {
+ throw new InvalidDivideQueryParserInputException(
+ "Intermediate queries should always be CONSTRUCT queries");
+ }
+
+ // save intermediate query
+ intermediateQueries.add(parsedIntermediateQuery);
+ }
+ }
+ }
+
+ // convert the parsed stream windows into a set of converted stream windows
+ List<ConvertedStreamWindow> convertedStreamWindows = new ArrayList<>();
+ for (ParsedStreamWindow parsedStreamWindow : parsedStreamWindows) {
+ convertedStreamWindows.add(convertParsedStreamWindow(parsedStreamWindow));
+ }
+
+ // generate RSP-QL query based on parsing output
+ RspQlQueryBody rspQlQueryBody = divideQueryGenerator.createRspQlQueryBody(
+ resultingQueryForm,
+ resultingQueryOutput,
+ parsedStreamQueryWhereClause.getStreamItems(),
+ input.getSolutionModifier(),
+ convertedStreamWindows,
+ this);
+
+ // retrieve input variables for sensor query rule
+ List<String> inputVariables = retrieveInputVariables(
+ parsedStreamQueryWhereClause.getContextPart(),
+ rspQlQueryBody.getUnboundVariables());
+
+ // check unbound variables of generated RSP-QL query body
+ if (validateUnboundVariablesInRspQlQueryBody) {
+ validateUnboundVariablesInRspQlQueryBody(
+ rspQlQueryBody, inputVariables, input.getReverseVariableMapping(),
+ input.getFinalQueryVariableMapping());
+ }
+
+ // check that solution modifier does not contain an input variable
+ if (inputVariables.stream().anyMatch(
+ solutionModifierVariables::contains)) {
+ throw new InvalidDivideQueryParserInputException(
+ "Solution modifier contains variable that will be instantiated " +
+ "by the DIVIDE query derivation");
+ }
+
+ // check that solution modifier only contains variables that are occurring
+ // in the RSP-QL query body
+ if (!findUnboundVariables(rspQlQueryBody.getQueryBody().replace(
+ input.getSolutionModifier(), ""))
+ .containsAll(solutionModifierVariables)) {
+ throw new InvalidDivideQueryParserInputException(
+ "Solution modifier contains variables that do not occur in the " +
+ "instantiated RSP-QL query body");
+ }
+
+ // save some variables that might or might not be updated below
+ String sensorQueryRuleContextPart = parsedStreamQueryWhereClause.getContextPart();
+ List<WhereClauseItem> parsedStreamQueryWhereClauseStreamItems =
+ parsedStreamQueryWhereClause.getStreamItems();
+ String parsedStreamQueryResultPart =
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart();
+
+ // check to update RSP-QL body string for SELECT queries
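+ // EXAMPLE: if ?patient is both a SELECT variable and an input variable (to be
+ // instantiated during the query derivation), it is renamed internally
+ // to a fresh variable (say ?v123) and re-exposed in the SELECT clause
+ // as "(?v123 AS ?patient)", so the query output keeps its original name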
+ if (resultingQueryForm == QueryForm.SELECT) {
+ // retrieve SELECT variables from output
+ List<String> selectVariables = parseSelectClause(resultingQueryOutput);
+
+ // adaptations are only needed if any of the select variables is an input
+ // variable of the sensor query rule (because then it will be substituted)
+ List<String> selectInputVariables = selectVariables
+ .stream()
+ .filter(inputVariables::contains)
+ .collect(Collectors.toList());
+ if (!selectInputVariables.isEmpty()) {
+ // calculate all input variables in the DIVIDE parser input
+ Set<String> allInputVariables = input.getUnboundVariables();
+
+ // generate a random DIVIDE variable for all SELECT input variables
+ Map<String, String> variableMapping = new HashMap<>();
+ for (String selectInputVariable : selectInputVariables) {
+ String newVariable = null;
+ boolean variableAccepted = false;
+ while (!variableAccepted) {
+ String triedNewVariable = generateRandomUnboundVariable();
+ // the new variable may not contain any existing
+ // input variable as a substring
+ variableAccepted = allInputVariables
+ .stream()
+ .noneMatch(triedNewVariable::contains);
+ if (variableAccepted) {
+ newVariable = triedNewVariable;
+ }
+ }
+ variableMapping.put(selectInputVariable, newVariable);
+ }
+
+ // update list of input variables
+ inputVariables = inputVariables
+ .stream()
+ .map(s -> variableMapping.getOrDefault(s, s))
+ .collect(Collectors.toList());
+
+ // update sensor query rule context & consequence
+ for (String selectInputVariable : selectInputVariables) {
+ sensorQueryRuleContextPart = sensorQueryRuleContextPart.replace(
+ selectInputVariable, variableMapping.get(selectInputVariable));
+ parsedStreamQueryResultPart = parsedStreamQueryResultPart.replace(
+ selectInputVariable, variableMapping.get(selectInputVariable));
+ }
+
+ // update stream windows
+ List<ConvertedStreamWindow> newConvertedStreamWindows = new ArrayList<>();
+ for (ConvertedStreamWindow convertedStreamWindow : convertedStreamWindows) {
+ String iri = convertedStreamWindow.getStreamIri();
+ String windowDefinition = convertedStreamWindow.getWindowDefinition();
+ for (String selectInputVariable : selectInputVariables) {
+ windowDefinition = windowDefinition.replaceAll(
+ Pattern.quote(String.format("?{%s}", selectInputVariable.substring(1))),
+ String.format("?{%s}", variableMapping.get(selectInputVariable).substring(1)));
+ }
+ List<WindowParameter> windowParameters = convertedStreamWindow.getWindowParameters();
+ windowParameters = windowParameters
+ .stream()
+ .map(wp -> new WindowParameter(
+ variableMapping.getOrDefault(wp.getVariable(), wp.getVariable()),
+ wp.isValueSubstitutionVariable() ?
+ variableMapping.getOrDefault(wp.getVariable(), wp.getVariable()) :
+ wp.getValue(),
+ wp.getType(),
+ wp.isValueSubstitutionVariable()))
+ .collect(Collectors.toList());
+ newConvertedStreamWindows.add(
+ new ConvertedStreamWindow(iri, windowDefinition, windowParameters));
+ }
+ convertedStreamWindows = new ArrayList<>(newConvertedStreamWindows);
+
+ // update RSP-QL query body
+ String solutionModifier = input.getSolutionModifier();
+ for (String selectInputVariable : selectInputVariables) {
+ solutionModifier = solutionModifier.replace(
+ selectInputVariable, variableMapping.get(selectInputVariable));
+ }
+ resultingQueryOutput = selectVariables
+ .stream()
+ .map(s -> variableMapping.containsKey(s)
+ ? String.format("(%s AS %s)", variableMapping.get(s), s)
+ : s)
+ .collect(Collectors.joining(" "));
+ List<WhereClauseItem> whereClauseStreamItems = new ArrayList<>();
+ for (WhereClauseItem item : parsedStreamQueryWhereClauseStreamItems) {
+ if (item.getItemType() == WhereClauseItemType.EXPRESSION) {
+ WhereClauseExpressionItem expressionItem = (WhereClauseExpressionItem) item;
+ String expression = expressionItem.getExpression();
+ for (String selectInputVariable : selectInputVariables) {
+ expression = expression.replace(
+ selectInputVariable, variableMapping.get(selectInputVariable));
+ }
+ whereClauseStreamItems.add(new WhereClauseExpressionItem(expression));
+
+ } else if (item.getItemType() == WhereClauseItemType.GRAPH) {
+ WhereClauseGraphItem graphItem = (WhereClauseGraphItem) item;
+ Graph graph = graphItem.getGraph();
+ String expression = graph.getClause();
+ for (String selectInputVariable : selectInputVariables) {
+ expression = expression.replace(
+ selectInputVariable, variableMapping.get(selectInputVariable));
+ }
+ whereClauseStreamItems.add(new WhereClauseGraphItem(
+ new Graph(graph.getName(), expression)));
+ }
+ }
+ parsedStreamQueryWhereClauseStreamItems = new ArrayList<>(whereClauseStreamItems);
+ rspQlQueryBody = divideQueryGenerator.createRspQlQueryBody(
+ resultingQueryForm,
+ resultingQueryOutput,
+ whereClauseStreamItems,
+ solutionModifier,
+ convertedStreamWindows,
+ this);
+ }
+ }
+
+ // update output to be used for sensor query
+ String sensorQueryRuleResult = extendOutputOfStreamQueryForSensorQueryRule(
+ parsedStreamQueryWhereClauseStreamItems,
+ parsedStreamQueryResultPart,
+ sensorQueryRulePrefixes);
+
+ // generate query pattern based on RSP-QL query body and parsing output
+ String queryPattern = divideQueryGenerator.createQueryPattern(
+ resultingQueryForm,
+ queryPatternPrefixes,
+ rspQlQueryBody.getQueryBody());
+
+ // retrieve output variables for sensor query rule
+ List<String> outputVariables = retrieveOutputVariables(
+ sensorQueryRuleContextPart,
+ sensorQueryRuleResult);
+
+ // generate sensor query rule
+ List<WindowParameter> allWindowParameters = new ArrayList<>();
+ for (ConvertedStreamWindow convertedStreamWindow : convertedStreamWindows) {
+ allWindowParameters.addAll(convertedStreamWindow.getWindowParameters());
+ }
+ String sensorQueryRule = divideQueryGenerator.createSensorQueryRule(
+ sensorQueryRulePrefixes,
+ sensorQueryRuleContextPart,
+ sensorQueryRuleResult,
+ inputVariables,
+ allWindowParameters,
+ outputVariables,
+ intermediateQueries);
+
+ return new DivideQueryParserOutput(
+ queryPattern, sensorQueryRule, goal, resultingQueryForm);
+ }
+
+ private DivideQueryParserOutput parseDivideQueryFromRspQlQuery(CleanDivideQueryParserInput input)
+ throws InvalidDivideQueryParserInputException {
+ // only the main stream query needs to be considered in this case;
+ // the window parameters are taken from the query itself
+
+ // parse the RSP-QL stream query
+ ParsedSparqlQuery parsedStreamQuery = parseRspQlQuery(input.getStreamQuery());
+
+ // check if stream query has no final part
+ if (parsedStreamQuery.getSplitSparqlQuery().getFinalPart() != null &&
+ !parsedStreamQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ "Input queries cannot contain any solution modifiers, since this" +
+ " cannot be preserved by DIVIDE (because individual" +
+ " instantiated queries are generated). Any solution modifier" +
+ " for the queries derived by DIVIDE can be defined as a" +
+ " separate input entry.");
+ }
+
+ // remove any specified default graph from the RSP-QL query
+ String streamQueryFromPart = parsedStreamQuery.getSplitSparqlQuery().getFromPart();
+ Matcher m = RSP_QL_FROM_DEFAULT_GRAPH_PATTERN.matcher(streamQueryFromPart);
+ while (m.find()) {
+ streamQueryFromPart = streamQueryFromPart.replace(m.group().trim(), "");
+ }
+
+ // retrieve the graph names & stream windows used in the FROM clauses of this RSP-QL query
+ Pair<List<String>, String> inputGraphNamesResult = retrieveGraphNamesFromRspQlFromPart(
+ streamQueryFromPart,
+ parsedStreamQuery.getPrefixes());
+ List<String> inputGraphNames = inputGraphNamesResult.getLeft();
+ String streamQueryFromPartLeftover = inputGraphNamesResult.getRight();
+ Map<String, StreamWindow> streamWindowMap =
+ completeStreamWindowsFromRspQlFromPart(
+ input.getStreamWindows(),
+ streamQueryFromPartLeftover,
+ parsedStreamQuery.getPrefixes());
+ inputGraphNames.addAll(streamWindowMap.keySet());
+
+ // only allow CONSTRUCT RSP-QL queries
+ // -> if they are of another form, they are translated to SPARQL and
+ // further parsed as if they were a SPARQL query
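+ // (e.g., "FROM NAMED WINDOW :w ON <s> [RANGE PT10S STEP PT2S]" becomes
+ // "FROM NAMED <s>", and "WINDOW :w { ... }" in the WHERE clause becomes
+ // "GRAPH <s> { ... }", with <s> the stream IRI of the window)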
+ if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() != QueryForm.CONSTRUCT) {
+ // create SPARQL FROM part
+ StringBuilder sparqlFromPart = new StringBuilder();
+ for (String inputGraphName : inputGraphNames) {
+ sparqlFromPart.append(String.format("FROM NAMED %s ",
+ streamWindowMap.containsKey(inputGraphName)
+ ? streamWindowMap.get(inputGraphName).getStreamIri()
+ : inputGraphName));
+ }
+
+ // create SPARQL WHERE clause
+ Matcher m1 = Pattern.compile("WINDOW\\s+(\\S+)").matcher(
+ parsedStreamQuery.getSplitSparqlQuery().getWherePart());
+ String sparqlWhereClause = parsedStreamQuery.getSplitSparqlQuery().getWherePart();
+ while (m1.find()) {
+ sparqlWhereClause = sparqlWhereClause.replaceFirst(
+ m1.group(),
+ String.format("GRAPH %s", streamWindowMap.get(
+ resolveGraphName(m1.group(1),
+ parsedStreamQuery.getPrefixes())).getStreamIri()));
+ }
+
+ // translate RSP-QL stream query to SPARQL
+ String sparqlStreamQuery = String.format("%s %s %s %s WHERE { %s } %s",
+ parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
+ parsedStreamQuery.getSplitSparqlQuery().getQueryForm().toString(),
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
+ sparqlFromPart,
+ sparqlWhereClause,
+ parsedStreamQuery.getSplitSparqlQuery().getFinalPart());
+
+ // construct new SPARQL input
+ CleanDivideQueryParserInput newInput = new CleanDivideQueryParserInput(
+ InputQueryLanguage.SPARQL,
+ new ArrayList<>(streamWindowMap.values()),
+ sparqlStreamQuery,
+ new ArrayList<>(),
+ null,
+ input.getSolutionModifier(),
+ input.getVariableMapping());
+ newInput.setUnboundVariables(input.getUnboundVariables());
+ newInput.setFinalQueryVariableMapping(input.getFinalQueryVariableMapping());
+ newInput.preprocess();
+
+ print("RSP-QL query has no CONSTRUCT form => converted to SPARQL " +
+ "=> new input:\n" + newInput);
+
+ return parseDivideQueryFromSparqlQueries(newInput);
+ }
+
+ // parse the WHERE clause based on the used prefixes & defined input graph names
+ WhereClause streamQueryWhereClause = parseWhereClauseOfQuery(
+ parsedStreamQuery.getSplitSparqlQuery().getWherePart(),
+ parsedStreamQuery.getPrefixes(),
+ inputGraphNames,
+ InputQueryLanguage.RSP_QL);
+
+ // validate stream query
+ validateSparqlQuery(String.format("%s CONSTRUCT { %s } WHERE { %s }",
+ parsedStreamQuery.getPrefixes()
+ .stream()
+ .map(prefix -> String.format("PREFIX %s %s",
+ prefix.getName(), prefix.getUri()))
+ .collect(Collectors.joining(" ")),
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
+ streamQueryWhereClause.getItems()
+ .stream()
+ .map(WhereClauseItem::getClause)
+ .collect(Collectors.joining(" "))),
+ "Stream");
+
+ // loop over WHERE clause items and adapt graph expression items:
+ // use actual graph name instead of window name
+ List newStreamQueryWhereClauseItems = new ArrayList<>();
+ for (WhereClauseItem item : streamQueryWhereClause.getItems()) {
+ if (item.getItemType() == WhereClauseItemType.GRAPH) {
+ WhereClauseGraphItem graphItem = (WhereClauseGraphItem) item;
+ if (streamWindowMap.containsKey(graphItem.getGraph().getName())) {
+ newStreamQueryWhereClauseItems.add(new WhereClauseGraphItem(
+ new Graph(streamWindowMap.get(graphItem.getGraph().getName()).getStreamIri(),
+ graphItem.getGraph().getClause())));
+ } else {
+ newStreamQueryWhereClauseItems.add(item);
+ }
+ } else {
+ newStreamQueryWhereClauseItems.add(item);
+ }
+ }
+ streamQueryWhereClause = new WhereClause(newStreamQueryWhereClauseItems);
+
+ // parse where clause of stream query
+ ParsedStreamQueryWhereClause parsedStreamQueryWhereClause =
+ parseStreamQueryWhereClauseOfQuery(
+ streamQueryWhereClause,
+ streamWindowMap.values()
+ .stream()
+ .map(StreamWindow::getStreamIri)
+ .collect(Collectors.toList()));
+
+ // validate parsed where clause of stream query: there should be at least
+ // 1 graph on a stream IRI (otherwise there is no point in constructing
+ // RSP queries with DIVIDE)
+ if (parsedStreamQueryWhereClause.getStreamItems()
+ .stream()
+ .noneMatch(whereClauseItem
+ -> whereClauseItem.getItemType() == WhereClauseItemType.GRAPH)) {
+ throw new InvalidDivideQueryParserInputException(
+ "Stream query should at least contain 1 graph on stream IRI in WHERE clause");
+ }
+
+ // validate defined solution modifier
+ List<String> solutionModifierVariables = new ArrayList<>();
+ if (!input.getSolutionModifier().trim().isEmpty()) {
+ solutionModifierVariables.addAll(
+ findUnboundVariables(input.getSolutionModifier()));
+ try {
+ List<String> selectVariables = new ArrayList<>();
+ List<String> whereClauseVariables = new ArrayList<>();
+ Matcher solutionModifierMatcher =
+ GROUP_BY_PATTERN.matcher(input.getSolutionModifier());
+ if (solutionModifierMatcher.find()) {
+ selectVariables.addAll(findUnboundVariables(solutionModifierMatcher.group()));
+ whereClauseVariables.addAll(solutionModifierVariables);
+ } else {
+ if (solutionModifierVariables.isEmpty()) {
+ selectVariables.add("?x");
+ } else {
+ selectVariables.addAll(solutionModifierVariables);
+ }
+ whereClauseVariables.addAll(selectVariables);
+ }
+ String testQuery = String.format("SELECT %s WHERE { %s } %s",
+ String.join(" ", selectVariables),
+ whereClauseVariables.stream().map(s -> s + " ?a ?b . ").
+ collect(Collectors.joining(" ")),
+ input.getSolutionModifier());
+ QueryFactory.create(testQuery);
+ } catch (QueryParseException e) {
+ throw new InvalidDivideQueryParserInputException(
+ "Defined solution modifier is no valid SPARQL");
+ }
+ }
+
+ // validate variables used in stream window definitions
+ // -> first parse them to check if they should be mapped to a new variable
+ // based on the preprocessing
+ // -> then check whether the antecedent of the sensor query rule will contain
+ // this variable, OR whether a default value is specified for it in the config
+ List<ParsedStreamWindow> parsedStreamWindows = new ArrayList<>();
+ for (StreamWindow streamWindow : streamWindowMap.values()) {
+ ParsedStreamWindow parsedStreamWindow =
+ parseStreamWindow(streamWindow, input.getVariableMapping());
+
+ List<String> unboundVariablesInContext =
+ findUnboundVariables(parsedStreamQueryWhereClause.getContextPart());
+ for (String unboundVariable : parsedStreamWindow.getUnboundVariables()) {
+ if (parsedStreamWindow.getDefaultWindowParameterValues().containsKey(unboundVariable)) {
+ if (unboundVariablesInContext.contains(unboundVariable)) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Variables defined in the stream window parameters should either occur " +
+ "in the context part of the stream query (in order to be able " +
+ "to be substituted during the query derivation), OR a default " +
+ "value for this variable should be specified in the " +
+ "configuration. For variable %s, the first condition is " +
+ "fulfilled, so a default value cannot be specified in the " +
+ "configuration.", input.getReverseVariableMapping().getOrDefault(
+ unboundVariable, unboundVariable)));
+ }
+ } else {
+ if (!unboundVariablesInContext.contains(unboundVariable)) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Variables defined in the stream window parameters should either occur " +
+ "in the context part of the stream query (in order to be able " +
+ "to be substituted during the query derivation), OR a default " +
+ "value for this variable should be specified in the " +
+ "configuration. For variable %s, the first condition is not " +
+ "fulfilled, so a default value should be specified in the " +
+ "configuration.", input.getReverseVariableMapping().getOrDefault(
+ unboundVariable, unboundVariable)));
+ }
+ }
+ }
+
+ parsedStreamWindows.add(parsedStreamWindow);
+ }
+
+ // declare variables which need to be initialized
+ // based on the queries in the parser input
+ String resultingQueryOutput;
+ QueryForm resultingQueryForm;
+ String goal;
+ List<ParsedSparqlQuery> intermediateQueries = new ArrayList<>();
+ Set<Prefix> queryPatternPrefixes;
+ Set<Prefix> sensorQueryRulePrefixes;
+
+ // the original output of the streaming query is also
+ // the output of the RSP-QL query generated with DIVIDE
+ // (and similarly for the form of this query)
+ resultingQueryOutput = parsedStreamQuery.getSplitSparqlQuery().getResultPart();
+ resultingQueryForm = parsedStreamQuery.getSplitSparqlQuery().getQueryForm();
+
+ // in this case, the query pattern prefixes can simply be the prefixes used
+ // in the streaming query & sensor query rule
+ queryPatternPrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
+ sensorQueryRulePrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
+
+ // in this case, the reasoner goal for DIVIDE is simply this query output
+ // in both antecedent & consequence
+ goal = divideQueryGenerator.createGoal(
+ parsedStreamQuery.getPrefixes(),
+ resultingQueryOutput,
+ resultingQueryOutput);
+
+ // convert the parsed stream windows into a set of converted stream windows
+ List<ConvertedStreamWindow> convertedStreamWindows = new ArrayList<>();
+ for (ParsedStreamWindow parsedStreamWindow : parsedStreamWindows) {
+ convertedStreamWindows.add(convertParsedStreamWindow(parsedStreamWindow));
+ }
+
+ // generate RSP-QL query based on parsing output
+ RspQlQueryBody rspQlQueryBody = divideQueryGenerator.createRspQlQueryBody(
+ resultingQueryForm,
+ resultingQueryOutput,
+ parsedStreamQueryWhereClause.getStreamItems(),
+ input.getSolutionModifier(),
+ convertedStreamWindows,
+ this);
+
+ // generate query pattern based on RSP-QL query body and parsing output
+ // -> the query pattern prefixes are simply the prefixes of the stream query here
+ String queryPattern = divideQueryGenerator.createQueryPattern(
+ resultingQueryForm,
+ queryPatternPrefixes,
+ rspQlQueryBody.getQueryBody());
+
+ // update output to be used for sensor query
+ String sensorQueryResult = extendOutputOfStreamQueryForSensorQueryRule(
+ parsedStreamQueryWhereClause.getStreamItems(),
+ parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
+ sensorQueryRulePrefixes);
+
+ // retrieve input and output variables for sensor query rule
+ List<String> inputVariables = retrieveInputVariables(
+ parsedStreamQueryWhereClause.getContextPart(),
+ rspQlQueryBody.getUnboundVariables());
+ List<String> outputVariables = retrieveOutputVariables(
+ parsedStreamQueryWhereClause.getContextPart(),
+ sensorQueryResult);
+
+ // check unbound variables of generated RSP-QL query body
+ if (validateUnboundVariablesInRspQlQueryBody) {
+ validateUnboundVariablesInRspQlQueryBody(
+ rspQlQueryBody, inputVariables, input.getReverseVariableMapping(),
+ input.getFinalQueryVariableMapping());
+ }
+
+ // check that solution modifier does not contain an input variable
+ if (inputVariables.stream().anyMatch(
+ solutionModifierVariables::contains)) {
+ throw new InvalidDivideQueryParserInputException(
+ "Solution modifier contains variable that will be instantiated " +
+ "by the DIVIDE query derivation");
+ }
+
+ // check that solution modifier only contains variables that are occurring
+ // in the RSP-QL query body
+ if (!findUnboundVariables(rspQlQueryBody.getQueryBody().replace(
+ input.getSolutionModifier(), ""))
+ .containsAll(solutionModifierVariables)) {
+ throw new InvalidDivideQueryParserInputException(
+ "Solution modifier contains variables that do not occur in the " +
+ "instantiated RSP-QL query body");
+ }
+
+ // generate sensor query rule
+ List<WindowParameter> allWindowParameters = new ArrayList<>();
+ for (ConvertedStreamWindow convertedStreamWindow : convertedStreamWindows) {
+ allWindowParameters.addAll(convertedStreamWindow.getWindowParameters());
+ }
+ String sensorQueryRule = divideQueryGenerator.createSensorQueryRule(
+ sensorQueryRulePrefixes,
+ parsedStreamQueryWhereClause.getContextPart(),
+ sensorQueryResult,
+ inputVariables,
+ allWindowParameters,
+ outputVariables,
+ intermediateQueries);
+
+ return new DivideQueryParserOutput(
+ queryPattern, sensorQueryRule, goal, resultingQueryForm);
+ }
+
+ /**
+ * Solves prefix conflicts in a given parsed SPARQL query.
+ * To do so, the method checks whether any of the prefix names in the given
+ * set of existing prefixes also occurs in the parsed SPARQL query. If so,
+ * the corresponding URI should be identical to that of the existing prefix.
+ * If not, the parsed SPARQL query is updated: a new prefix is created and
+ * substituted in the corresponding query parts.
+ *
+ * @param existingPrefixes set of prefixes against which the prefixes in the
+ * given SPARQL query are compared and any conflicts
+ * are resolved
+ * @param parsedSparqlQuery parsed SPARQL query which needs to be checked for
+ * any prefix conflicts
+ * @return new parsed SPARQL query in which all possible prefix conflicts
+ * are resolved
+ */
+ private ParsedSparqlQuery solvePrefixConflicts(Set<Prefix> existingPrefixes,
+ ParsedSparqlQuery parsedSparqlQuery) {
+ // create a list of conflicting prefixes in the given SPARQL query
+ Set<Prefix> conflictingPrefixes = new HashSet<>();
+ for (Prefix prefix : parsedSparqlQuery.getPrefixes()) {
+ for (Prefix existingPrefix : existingPrefixes) {
+ // there is a conflict if a prefix of the given SPARQL query has the
+ // same name as an existing prefix, but another URI
+ if (existingPrefix.getName().equals(prefix.getName()) &&
+ !existingPrefix.getUri().equals(prefix.getUri())) {
+ conflictingPrefixes.add(prefix);
+ break;
+ }
+ }
+ }
+
+ // if there are no conflicts, the same parsed SPARQL query can be returned
+ if (conflictingPrefixes.isEmpty()) {
+ return parsedSparqlQuery;
+
+ } else {
+ // otherwise, the conflicting prefixes should be given a different name
+ // -> start from current fields
+ String prefixPart = parsedSparqlQuery.getSplitSparqlQuery().getPrefixPart();
+ String resultPart = parsedSparqlQuery.getSplitSparqlQuery().getResultPart();
+ String wherePart = parsedSparqlQuery.getSplitSparqlQuery().getWherePart();
+ Set<Prefix> prefixes = new HashSet<>(parsedSparqlQuery.getPrefixes());
+
+ for (Prefix conflictingPrefix : conflictingPrefixes) {
+ String newPrefixName = null;
+ Prefix newPrefix = null;
+
+ // check if a prefix with the same URI already existed
+ for (Prefix existingPrefix : existingPrefixes) {
+ if (existingPrefix.getUri().equals(conflictingPrefix.getUri())) {
+ // if so, this one can be reused!
+ newPrefixName = existingPrefix.getName();
+ newPrefix = existingPrefix;
+ }
+ }
+ // if not, create a new prefix with new name and same URI
+ if (newPrefixName == null) {
+ newPrefixName = String.format("newPrefix%d:", PREFIX_COUNTER++);
+ newPrefix = new Prefix(newPrefixName, conflictingPrefix.getUri());
+ }
+
+ // replace prefix name in existing query parts
+ Pattern replacingPattern =
+ Pattern.compile("(\\s|\\(|^|\\^)" + conflictingPrefix.getName());
+ Matcher m1 = replacingPattern.matcher(prefixPart);
+ prefixPart = m1.replaceAll("$1" + newPrefixName);
+ if ((parsedSparqlQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.CONSTRUCT ||
+ parsedSparqlQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.DESCRIBE ||
+ parsedSparqlQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.SELECT)
+ && resultPart != null) {
+ Matcher m2 = replacingPattern.matcher(resultPart);
+ resultPart = m2.replaceAll("$1" + newPrefixName);
+ }
+ if (wherePart != null) {
+ Matcher m3 = replacingPattern.matcher(wherePart);
+ wherePart = m3.replaceAll("$1" + newPrefixName);
+ }
+
+ // update set of prefixes
+ prefixes.remove(conflictingPrefix);
+ prefixes.add(newPrefix);
+ }
+
+ // return updated query
+ return new ParsedSparqlQuery(
+ new SplitSparqlQuery(
+ prefixPart,
+ parsedSparqlQuery.getSplitSparqlQuery().getQueryForm(),
+ resultPart,
+ parsedSparqlQuery.getSplitSparqlQuery().getFromPart(),
+ wherePart,
+ parsedSparqlQuery.getSplitSparqlQuery().getFinalPart()),
+ prefixes);
+ }
+ }
+
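+ // Illustrative example (hypothetical namespaces, not taken from the codebase):
+ // if the existing prefixes contain "ex:" -> <http://example.org/a#> and the
+ // parsed query declares "ex:" -> <http://example.org/b#>, the query's "ex:"
+ // is renamed (reusing an existing prefix with the same URI if one exists,
+ // otherwise a generated "newPrefixN:") in its prefix, result and WHERE parts.
+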
+ /**
+ * Validates the occurrence of unbound variables in the RSP-QL query body generated
+ * by this parser. If validation succeeds, this method returns after performing its
+ * checks. If validation fails, an {@link InvalidDivideQueryParserInputException}
+ * is thrown.
+ * Validation fails if the result part of the RSP-QL query body contains variables
+ * that occur neither in the WHERE clause nor in the input variables defined for
+ * substitution during the query derivation. If such variables exist, they will
+ * lead to errors when registering a query.
+ * Validation also fails if the RSP-QL query body is a SELECT query containing
+ * "... AS ?var" expressions where ?var already occurs as a variable name in
+ * the WHERE clause of the query OR in the list of input variables.
+ *
+ * @param rspQlQueryBody RSP-QL query body that is about to be validated
+ * @param inputVariables input variables for the DIVIDE query derivation, that may
+ * occur in the RSP-QL query body but will later be substituted
+ * @param outputMapping mapping of variables in the parser input to the original
+ * input variables (only needed to give a clear error message
+ * to the end user about the problematic variables; not used
+ * for the validation itself)
+ * @param finalQueryMapping mapping of variables in the stream query to the
+ * corresponding variable names in the final query (like
+ * the output mapping, only used for error messages)
+ * @throws InvalidDivideQueryParserInputException when validation fails
+ */
+ private void validateUnboundVariablesInRspQlQueryBody(RspQlQueryBody rspQlQueryBody,
+ List<String> inputVariables,
+ Map<String, String> outputMapping,
+ Map<String, String> finalQueryMapping)
+ throws InvalidDivideQueryParserInputException {
+ print("VALIDATING UNBOUND VARIABLES IN RSP-QL QUERY BODY");
+
+ // obtain all unbound variables in WHERE clause
+ Set<String> unboundVariablesInWherePart =
+ new HashSet<>(findUnboundVariables(rspQlQueryBody.getWherePart()));
+
+ // obtain all unbound variables in result part
+ // -> special caveat required for SELECT queries
+ Pair<Set<String>, Set<String>> unboundVariablesInResultPart =
+ findUnboundVariablesInQueryResultPart(
+ rspQlQueryBody.getResultPart(), rspQlQueryBody.getQueryForm());
+ Set<String> expectedUnboundVariablesInResultPart = unboundVariablesInResultPart.getLeft();
+ Set<String> forbiddenUnboundVariablesInResultPart = unboundVariablesInResultPart.getRight();
+
+ print("Output mapping: " + outputMapping);
+ print("Final query variable mapping: " + finalQueryMapping);
+
+ print("Mappings for expected unbound variables in result part " +
+ "(var -> output mapping -> final query variable mapping)");
+ for (String s : expectedUnboundVariablesInResultPart) {
+ String m1 = outputMapping.getOrDefault(s, s);
+ String m2 = finalQueryMapping.getOrDefault(m1, m1);
+ print(String.format(" %s - %s - %s", s, m1, m2));
+ }
+
+ print("Forbidden variables: " + forbiddenUnboundVariablesInResultPart);
+ print("WHERE clause: " + rspQlQueryBody.getWherePart());
+ print("Unbound variables in WHERE clause: " + unboundVariablesInWherePart);
+ // validate that none of the forbidden variables occurs in the WHERE clause
+ List<String> problematicVariables = forbiddenUnboundVariablesInResultPart
+ .stream()
+ .filter(s -> unboundVariablesInWherePart.contains(s)
+ || inputVariables.contains(s)
+ || expectedUnboundVariablesInResultPart.contains(s))
+ .map(s -> outputMapping.getOrDefault(s, s))
+ .collect(Collectors.toList());
+ if (!problematicVariables.isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "The SELECT clause of the resulting RSP-QL query body " +
+ "binds new variables (via AS) that are not allowed to occur in the " +
+ "WHERE clause, in the input variables or in the other SELECT " +
+ "variables, but that are present there: %s. This is probably " +
+ "caused by an invalid SELECT clause in the stream or final query. " +
+ "Make sure this clause is valid, that a correct mapping file is " +
+ "provided, and/or that automatic mapping of identically named " +
+ "variables is enabled via the settings (if desired).",
+ String.join(", ", problematicVariables)));
+ }
+
+ print("Validating unbound variables for: " + rspQlQueryBody.getQueryBody());
+ print("Input variables at this point: " + inputVariables);
+ // check if the result part of the RSP-QL query body does not contain any
+ // invalid unbound variables
+ // -> invalid means that they do not occur in the WHERE clause, and also not
+ // in the set of input variables that are about to be replaced
+ problematicVariables = expectedUnboundVariablesInResultPart
+ .stream()
+ .filter(s -> !unboundVariablesInWherePart.contains(s)
+ && !inputVariables.contains(s))
+ .map(s -> {
+ String m1 = outputMapping.getOrDefault(s, s);
+ return finalQueryMapping.getOrDefault(m1, m1);
+ })
+ .collect(Collectors.toList());
+ if (!problematicVariables.isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Resulting RSP-QL query body will contain invalid variables in result part, " +
+ "that are not present in WHERE clause and will also not be replaced " +
+ "during the DIVIDE query derivation: %s. Make sure the input is correct. " +
+ "If the input contains a final query, make sure to define a mapping of a " +
+ "variable in the stream query to each of these variable in the final query " +
+ "(or allow automatic mapping of matching variable names via the settings). " +
+ "If the input only contains a stream query, make sure the WHERE clause " +
+ "of the stream query contains these variables.",
+ String.join(", ", problematicVariables)));
+ }
+
+ print("======================================");
+ }
+
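+ // Illustrative examples of failing validations (hypothetical clauses):
+ // "SELECT (?a AS ?b)" fails if ?b also occurs in the WHERE clause or in the
+ // input variables, and "SELECT ?c" fails if ?c occurs neither in the WHERE
+ // clause nor in the input variables to be substituted at query derivation.
+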
+ private Pair<Set<String>, Set<String>> findUnboundVariablesInQueryResultPart(String result,
+ QueryForm queryForm)
+ throws InvalidDivideQueryParserInputException {
+ print("-> FINDING UNBOUND VARIABLES IN QUERY RESULT PART");
+ if (queryForm == QueryForm.SELECT) {
+ Set<String> expectedVariables = new HashSet<>();
+ Set<String> forbiddenVariables = new HashSet<>();
+ String formattedSelectClause = String.format("%s ", result.trim());
+ if (SELECT_CLAUSE_PATTERN.matcher(formattedSelectClause).matches()) {
+ Matcher m = SELECT_CLAUSE_PATTERN_ENTRY.matcher(formattedSelectClause);
+ while (m.find()) {
+ String match = m.group().trim();
+ Matcher m2 = SELECT_CLAUSE_EXPRESSION_PATTERN.matcher(match);
+ if (m2.matches()) {
+ print(" Expression pattern match: '" + match + "'");
+ // if it matches the expression "... AS ?...", then only the first part
+ // should be returned as a variable, IF it is a variable of course
+ m2.reset();
+ while (m2.find()) {
+ if (VAR1_PATTERN.matcher(m2.group(1)).matches()) {
+ expectedVariables.add(m2.group(1));
+ print(" Varname in expression: '" + m2.group(1) + "'");
+ } else {
+ print(" NO varname in expression: '" + m2.group(1) + "'");
+ }
+ forbiddenVariables.add(m2.group(2));
+ print(" Forbidden variable: " + m2.group(2));
+ }
+ } else {
+ print(" Varname match: '" + match + "'");
+ // if no match with expression, then this match is a single variable name
+ expectedVariables.add(match);
+ }
+ }
+ return Pair.create(expectedVariables, forbiddenVariables);
+
+ } else {
+ throw new InvalidDivideQueryParserInputException(
+ "SELECT clause of resulting RSP-QL query is invalid, which is probably " +
+ "caused by an invalid SELECT clause in the stream or final query.");
+ }
+ } else {
+ return Pair.create(
+ new HashSet<>(findUnboundVariables(result)),
+ new HashSet<>());
+ }
+ }
+
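+ // Illustrative example (hypothetical SELECT clause): for "SELECT ?x (?y AS ?c)",
+ // the expected variables are {?x, ?y} and the forbidden variables are {?c},
+ // since ?c is newly bound by the AS expression.
+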
+ /**
+ * @param query SPARQL query body string
+ * @return parsed version of the given SPARQL query
+ * @throws InvalidDivideQueryParserInputException if the query is of invalid syntax
+ */
+ @Override
+ public ParsedSparqlQuery parseSparqlQuery(String query)
+ throws InvalidDivideQueryParserInputException {
+ // first split SPARQL query into its different parts
+ SplitSparqlQuery splitSparqlQuery = splitSparqlQuery(query);
+
+ // retrieve the prefixes used in this SPARQL query
+ Set<Prefix> prefixes = getPrefixes(splitSparqlQuery.getPrefixPart());
+
+ // check for conflicting prefixes
+ Map<String, String> prefixMap = new HashMap<>();
+ for (Prefix prefix : prefixes) {
+ if (prefixMap.containsKey(prefix.getName()) &&
+ !prefixMap.get(prefix.getName()).equals(prefix.getUri())) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Multiple prefixes are present with name '%s'",
+ prefix.getName()));
+ }
+ prefixMap.put(prefix.getName(), prefix.getUri());
+ }
+
+ // check for prefix names occurring in query string without being defined as a prefix
+ String queryWithoutPrefixes = query.replace(splitSparqlQuery.getPrefixPart(), "");
+ Set<String> existingPrefixNames = prefixMap.keySet();
+ Matcher m = USED_PREFIX_PATTERN.matcher(queryWithoutPrefixes);
+ while (m.find()) {
+ if (!existingPrefixNames.contains(m.group(2)) && !"_:".equals(m.group(2))) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Query string contains undefined prefix '%s'", m.group(2)));
+ }
+ }
+
+ // remove prefixes that do not occur in query body
+ prefixes.removeIf(prefix -> !Pattern.compile("(\\s|\\(|^|\\^)" + prefix.getName())
+ .matcher(queryWithoutPrefixes).find());
+
+ return new ParsedSparqlQuery(
+ splitSparqlQuery,
+ prefixes);
+ }
+
+ /**
+ * @param query RSP-QL query body string
+ * @return parsed version of the given RSP-QL query
+ * @throws InvalidDivideQueryParserInputException if the query is of invalid syntax
+ */
+ private ParsedSparqlQuery parseRspQlQuery(String query)
+ throws InvalidDivideQueryParserInputException {
+ // first split RSP-QL query as a SPARQL query into its different parts
+ SplitSparqlQuery splitSparqlQuery = splitSparqlQuery(query);
+
+ // retrieve the prefixes used in this SPARQL query
+ Set<Prefix> prefixes = getPrefixes(splitSparqlQuery.getPrefixPart());
+
+ // check for conflicting prefixes
+ Map<String, String> prefixMap = new HashMap<>();
+ for (Prefix prefix : prefixes) {
+ if (prefixMap.containsKey(prefix.getName()) &&
+ !prefixMap.get(prefix.getName()).equals(prefix.getUri())) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Multiple prefixes are present with name '%s'",
+ prefix.getName()));
+ }
+ prefixMap.put(prefix.getName(), prefix.getUri());
+ }
+
+ // check for prefix names occurring in query string without being defined as a prefix
+ String queryWithoutPrefixes = query.replace(splitSparqlQuery.getPrefixPart(), "");
+ Set<String> existingPrefixNames = new HashSet<>(prefixMap.keySet());
+ Matcher m = USED_PREFIX_PATTERN.matcher(queryWithoutPrefixes);
+ while (m.find()) {
+ if (!existingPrefixNames.contains(m.group(2))) {
+ if (":".equals(m.group(2))) {
+ String prefixUri = "";
+ splitSparqlQuery = new SplitSparqlQuery(
+ splitSparqlQuery.getPrefixPart() + " PREFIX : " + prefixUri,
+ splitSparqlQuery.getQueryForm(),
+ splitSparqlQuery.getResultPart(),
+ splitSparqlQuery.getFromPart(),
+ splitSparqlQuery.getWherePart(),
+ splitSparqlQuery.getFinalPart());
+ existingPrefixNames.add(":");
+ prefixes.add(new Prefix(":", prefixUri));
+ } else {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Query string contains undefined prefix '%s'", m.group(2)));
+ }
+ }
+ }
+
+ // remove prefixes that do not occur in query body
+ prefixes.removeIf(prefix -> !Pattern.compile("(\\s|\\(|^|\\^)" + prefix.getName())
+ .matcher(queryWithoutPrefixes).find());
+
+ return new ParsedSparqlQuery(
+ splitSparqlQuery,
+ prefixes);
+ }
+
+ /**
+ * @param query SPARQL query body string
+ * @return split SPARQL query containing the different parts
+ * @throws InvalidDivideQueryParserInputException if the query is of invalid syntax
+ */
+ private SplitSparqlQuery splitSparqlQuery(String query)
+ throws InvalidDivideQueryParserInputException {
+ // try to match the query pattern on the SPARQL query
+ Matcher m = SPARQL_QUERY_SPLIT_PATTERN.matcher(query);
+ if (m.find()) {
+ // parse query form
+ QueryForm queryForm = QueryForm.fromString(m.group(5).trim());
+ if (queryForm == null) {
+ throw new InvalidDivideQueryParserInputException(
+ "Invalid query form specified in query");
+ }
+
+ // parse result part (output) & remove curly braces
+ String resultPart = m.group(6).trim();
+ resultPart = parseQueryResultPart(resultPart, queryForm);
+
+ // create split query & make sure all strings are trimmed
+ return new SplitSparqlQuery(
+ m.group(1) == null ? null : m.group(1).trim(),
+ queryForm,
+ resultPart,
+ m.group(8) == null ? null : m.group(8).trim(),
+ m.group(11) == null ? null : m.group(11).trim(),
+ m.group(12) == null ? null : m.group(12).trim());
+
+ } else {
+ throw new InvalidDivideQueryParserInputException(
+ "Query does not have valid SPARQL format");
+ }
+ }
+
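+ // Illustrative example (hypothetical query): "PREFIX ex: <http://example.org/>
+ // CONSTRUCT { ?s a ex:Thing } FROM <http://example.org/g> WHERE { ?s ?p ?o }
+ // LIMIT 10" is split into a prefix part, query form CONSTRUCT, a result part
+ // (curly braces removed), a FROM part, a WHERE part and a final part that
+ // holds the solution modifier.
+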
+ /**
+ * Parses query result part (output) & removes curly braces if present.
+ *
+ * @param resultPart result part string
+ * @param queryForm form of query
+ * @return parsed result part string
+ * @throws InvalidDivideQueryParserInputException if result part is invalid
+ */
+ private String parseQueryResultPart(String resultPart, QueryForm queryForm)
+ throws InvalidDivideQueryParserInputException {
+ if (resultPart.startsWith("{")) {
+ if (resultPart.endsWith("}")) {
+ resultPart = resultPart.substring(1, resultPart.length() - 1).trim();
+ } else {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Format of %s clause is invalid", queryForm.toString()));
+ }
+ } else {
+ if (resultPart.endsWith("}")) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Format of %s clause is invalid", queryForm.toString()));
+ }
+ }
+ return resultPart;
+ }
+
+ /**
+ * @param prefixString string of prefixes as defined in a SPARQL or RSP-QL query
+ * @return parsed set of prefixes
+ */
+ private Set<Prefix> getPrefixes(String prefixString) {
+ Matcher m = PREFIX_PATTERN.matcher(prefixString);
+ Set<Prefix> prefixes = new HashSet<>();
+ while (m.find()) {
+ String prefixName = m.group(2).trim();
+ String prefixURI = m.group(3).trim();
+ prefixes.add(new Prefix(prefixName, prefixURI));
+ }
+ return prefixes;
+ }
+
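+ // Illustrative example (hypothetical declaration): "PREFIX ex: <http://example.org/>"
+ // yields a Prefix with name "ex:" (including the colon) and the URI as declared,
+ // enclosed in angle brackets.
+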
+ /**
+ * @param fromPart string with FROM clauses as defined in SPARQL query
+ * @param prefixes set of prefixes to be used for resolving the graph names
+ * occurring in the FROM clauses of the SPARQL query
+ * @return pair with as left value the strings of actual graph names appearing
+ * in the FROM clauses, as right value the remainder of the FROM part
+ * with all matching named graphs removed
+ * @throws InvalidDivideQueryParserInputException if any of the graph names
+ * occurring in the FROM clause
+ * is invalid
+ */
+ private Pair<List<String>, String> retrieveGraphNamesFromSparqlFromPart(String fromPart,
+ Set<Prefix> prefixes)
+ throws InvalidDivideQueryParserInputException {
+ String fromPartLeftover = fromPart;
+ Matcher matcher = SPARQL_FROM_NAMED_GRAPH_PATTERN.matcher(fromPart);
+ List<String> graphNames = new ArrayList<>();
+ while (matcher.find()) {
+ graphNames.add(resolveGraphName(matcher.group(1), prefixes));
+ fromPartLeftover = fromPartLeftover.replace(matcher.group().trim(), "").trim();
+ }
+ return Pair.create(graphNames, fromPartLeftover);
+ }
+
+ /**
+ * @param fromPart string with FROM clauses as defined in RSP-QL query
+ * @param prefixes set of prefixes to be used for resolving the graph names
+ * occurring in the FROM clauses of the SPARQL query
+ * @return pair with as left value the strings of actual graph names appearing
+ * in the FROM clauses, as right value the remainder of the FROM part
+ * with all matching named graphs removed
+ * @throws InvalidDivideQueryParserInputException if any of the graph names
+ * occurring in the FROM clause
+ * is invalid
+ */
+ private Pair<List<String>, String> retrieveGraphNamesFromRspQlFromPart(String fromPart,
+ Set<Prefix> prefixes)
+ throws InvalidDivideQueryParserInputException {
+ String fromPartLeftover = fromPart;
+ Matcher matcher = RSP_QL_FROM_NAMED_GRAPH_PATTERN.matcher(fromPart);
+ List<String> graphNames = new ArrayList<>();
+ while (matcher.find()) {
+ graphNames.add(resolveGraphName(matcher.group(1), prefixes));
+ fromPartLeftover = fromPartLeftover.replace(matcher.group().trim(), "").trim();
+ }
+ return Pair.create(graphNames, fromPartLeftover);
+ }
+
+ /**
+ * @param streamWindows list of possibly incomplete stream windows, which might not
+ * contain the stream window definition
+ * @param fromPart string with FROM clauses as defined in RSP-QL query
+ * @param prefixes set of prefixes to be used for resolving the window and stream
+ * names occurring in the FROM clauses
+ * @return stream windows completed according to how they appear in these FROM clauses,
+ * associated in a map to a key representing the window name in the query
+ * @throws InvalidDivideQueryParserInputException when a window name is defined more
+ * than once, or if any of the graph names
+ * occurring in the FROM clause is invalid
+ */
+ private Map<String, StreamWindow> completeStreamWindowsFromRspQlFromPart(List<StreamWindow> streamWindows,
+ String fromPart,
+ Set<Prefix> prefixes)
+ throws InvalidDivideQueryParserInputException {
+ String fromPartLeftover = fromPart;
+ Matcher matcher = RSP_QL_FROM_NAMED_WINDOW_PATTERN.matcher(fromPart);
+ Map<String, StreamWindow> streamWindowMap = new HashMap<>();
+ while (matcher.find()) {
+ String windowName = resolveGraphName(matcher.group(1), prefixes);
+ if (streamWindowMap.containsKey(windowName)) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Window name '%s' defined more than once", windowName));
+ }
+ String streamName = resolveGraphName(matcher.group(2), prefixes);
+ Matcher m2 = RSP_QL_WINDOW_PARAMETERS_PATTERN.matcher(matcher.group(3));
+ if (m2.find()) {
+ streamWindowMap.put(windowName, new StreamWindow(streamName,
+ String.format("%s %s", m2.group(1), m2.group(7))));
+ } else {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Window definition of stream '%s' is no " +
+ "valid RSP-QL", streamName));
+ }
+ fromPartLeftover = fromPartLeftover.replace(matcher.group().trim(), "").trim();
+ }
+ if (!fromPartLeftover.trim().isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("RSP-QL query contains invalid part '%s'", fromPartLeftover));
+ }
+
+ // check if every stream window defined in the JSON config also occurs
+ // in the RSP-QL from part, and append the default window parameters
+ Collection<StreamWindow> rspQlStreamWindows = streamWindowMap.values();
+ for (StreamWindow definedStreamWindow : streamWindows) {
+ Optional<StreamWindow> matchingStreamWindow = rspQlStreamWindows.stream()
+ .filter(sw -> sw.getStreamIri().equals(definedStreamWindow.getStreamIri()))
+ .findFirst();
+ if (matchingStreamWindow.isPresent()) {
+ if (definedStreamWindow.getWindowDefinition() != null &&
+ !matchingStreamWindow.get().getWindowDefinition().equals(
+ definedStreamWindow.getWindowDefinition())) {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Configuration contains stream window with IRI '%s' that has a different window " +
+ "definition than the corresponding stream window present in the " +
+ "RSP-QL stream query", definedStreamWindow.getStreamIri()));
+ }
+ matchingStreamWindow.get().setDefaultWindowParameterValues(
+ definedStreamWindow.getDefaultWindowParameterValues());
+ } else {
+ throw new InvalidDivideQueryParserInputException(String.format(
+ "Configuration contains stream window with IRI '%s' that does not occur " +
+ "in the RSP-QL stream query", definedStreamWindow.getStreamIri()));
+ }
+ }
+
+ return streamWindowMap;
+ }
+
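+ // Illustrative example (hypothetical window clause): "FROM NAMED WINDOW ex:w
+ // ON ex:stream [RANGE PT30S STEP PT10S]" adds an entry that maps the resolved
+ // window IRI of ex:w to a StreamWindow for the resolved stream IRI of
+ // ex:stream, with its two window parameters joined as a single string.
+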
+ /**
+ * @param whereClause extracted WHERE clause of a SPARQL or RSP-QL query
+ * @param prefixes set of prefixes used in this query
+ * @param inputGraphNames graph names specified in the FROM clauses of this query
+ * @param queryLanguage language used for specifying the WHERE clause of this query
+ * (this can either be SPARQL or RSP-QL)
+ * @return parsed WHERE clause of the query, containing a list of WHERE clause
+ * items which can either be graphs or expressions
+ * @throws InvalidDivideQueryParserInputException if the WHERE clause contains invalid
+ * graph names
+ */
+ private WhereClause parseWhereClauseOfQuery(String whereClause,
+ Set<Prefix> prefixes,
+ List<String> inputGraphNames,
+ InputQueryLanguage queryLanguage)
+ throws InvalidDivideQueryParserInputException {
+ List<WhereClauseItem> items = new ArrayList<>();
+
+ // pattern to be used for parsing depends on query language
+ Pattern pattern;
+ if (queryLanguage == InputQueryLanguage.SPARQL) {
+ pattern = SPARQL_WHERE_CLAUSE_GRAPH_PATTERN;
+ } else { // RSP_QL query language
+ pattern = RSP_QL_WHERE_CLAUSE_GRAPH_OR_WINDOW_PATTERN;
+ }
+
+ // make sure the WHERE clause is trimmed before parsing
+ whereClause = whereClause.trim();
+
+ // try to find graph patterns in the WHERE clause
+ Matcher matcher = pattern.matcher(whereClause);
+ int lastEndIndex = 0;
+
+ // loop over all found graph patterns
+ while (matcher.find()) {
+ int startIndex = matcher.start();
+ int endIndex = matcher.end();
+
+ // update indices & create expression item if some text is found in between
+ // last match and this match
+ if (startIndex != lastEndIndex) {
+ String expression = whereClause.substring(lastEndIndex, startIndex).trim();
+ items.add(new WhereClauseExpressionItem(expression));
+ }
+
+ // find end of graph pattern
+ int braceCount = 1;
+ int loopIndex = endIndex + 1;
+ while (braceCount > 0) {
+ char c = whereClause.charAt(loopIndex);
+ if (c == '{') {
+ braceCount++;
+ } else if (c == '}') {
+ braceCount--;
+ }
+ loopIndex++;
+ }
+ lastEndIndex = loopIndex;
+
+ // parse name of found graph
+ String name = matcher.group(2).trim();
+ name = resolveGraphName(name, prefixes);
+ if (!inputGraphNames.contains(name)) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Graph name '%s' not specified with FROM", name));
+ }
+
+ // parse clause of found graph
+ String clause = whereClause.substring(endIndex + 1, loopIndex - 1).trim();
+
+ // create graph item for the found graph with parsed name & clause
+ items.add(new WhereClauseGraphItem(new Graph(name, clause)));
+ }
+
+ // process possible expression after last found graph
+ // (if no graph is found, this expression will contain the full WHERE clause)
+ if (lastEndIndex != whereClause.length()) {
+ String lastExpression = whereClause.substring(lastEndIndex).trim();
+ items.add(new WhereClauseExpressionItem(lastExpression));
+ }
+
+ return new WhereClause(items);
+ }
+
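+ // Illustrative example (hypothetical WHERE clause): "GRAPH <http://example.org/g>
+ // { ?s ?p ?o } FILTER (?o > 1)" is parsed into a graph item for the graph
+ // (whose name must have been declared via FROM) followed by an expression item
+ // for the FILTER.
+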
+ /**
+ * Resolves a graph name against a set of prefixes.
+ * If the graph name is not an IRI (<...>), then it should start with a
+ * prefix in the specified list.
+ *
+ * @param graphName graph name to be resolved
+ * @param prefixes set of prefixes to be used for resolving the graph name
+ * @return resolved graph name (can be the same as the input if it was
+ * already a valid IRI)
+ * @throws InvalidDivideQueryParserInputException if the graph name is invalid
+ * (invalid syntax or non-existing prefix)
+ */
+ private String resolveGraphName(String graphName, Set<Prefix> prefixes)
+ throws InvalidDivideQueryParserInputException {
+ // parse name of found graph
+ if (graphName.startsWith("<")) {
+ if (!graphName.endsWith(">")) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Invalid graph name '%s'", graphName));
+ }
+ } else {
+ boolean matched = false;
+ for (Prefix prefix : prefixes) {
+ if (graphName.startsWith(prefix.getName())) {
+ matched = true;
+ String afterPrefix = graphName.replaceFirst(Pattern.quote(prefix.getName()), "");
+ graphName = String.format("%s%s>",
+ prefix.getUri().substring(0, prefix.getUri().length() - 1),
+ afterPrefix);
+ break;
+ }
+ }
+ if (!matched) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("Invalid graph name '%s' (no valid IRI" +
+ " & no existing prefix used)", graphName));
+ }
+ }
+ return graphName;
+ }
+
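+ // Illustrative example (hypothetical prefix): with the prefix "ex:" declared
+ // as <http://example.org/>, the graph name "ex:patients" resolves to
+ // <http://example.org/patients>.
+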
+ /**
+ * Processes the WHERE clause items in the parsed WHERE clause of a SPARQL
+ * or RSP-QL query.
+ * This processing is an additional parsing step: the WHERE clause items are split
+ * based on whether they depend on the context, or depend on one or more streams
+ * specified as stream graph IRIs in the input of the parser. The context expressions
+ * are appended and returned as a string in the processed result, whereas the stream
+ * expressions are still returned as an ordered list of items (i.e., either graph
+ * patterns or expressions with a SPARQL keyword pattern outside a graph pattern).
+ * While processing, the items are verified and an exception is thrown if anything
+ * is not valid.
+ *
+ * @param whereClause parsed WHERE clause of SPARQL OR RSP-QL query
+ * @param streamGraphNames names (IRIs) of stream graphs as specified in the parser input
+ * @return the processed WHERE clause of the stream query
+ * @throws InvalidDivideQueryParserInputException if an expression is present outside a graph without
+ * an associated SPARQL keyword (these should be
+ * placed inside a graph pattern), OR when a graph
+ * that is not reading from a stream contains a
+ * SPARQL keyword pattern (such a graph becomes part
+ * of the context, which cannot contain any SPARQL
+ * patterns, so the pattern should be placed outside
+ * the graph)
+ */
+ private ParsedStreamQueryWhereClause parseStreamQueryWhereClauseOfQuery(
+ WhereClause whereClause, List<String> streamGraphNames)
+ throws InvalidDivideQueryParserInputException {
+ // prepare results of parsing
+ StringBuilder contextPart = new StringBuilder();
+ List<WhereClauseItem> streamItems = new ArrayList<>();
+
+ for (WhereClauseItem item : whereClause.getItems()) {
+ if (item.getItemType() == WhereClauseItemType.EXPRESSION) {
+ WhereClauseExpressionItem expressionItem = (WhereClauseExpressionItem) item;
+
+ // expression items are verified and split based on SPARQL keywords,
+ // and are included into the items that depend on the input stream(s)
+ streamItems.addAll(
+ verifyAndSplitStreamQueryWhereClauseExpressionItemsBySparqlKeywords(expressionItem));
+
+ } else if (item.getItemType() == WhereClauseItemType.GRAPH) {
+ WhereClauseGraphItem graphItem = (WhereClauseGraphItem) item;
+
+ // graph patterns are handled differently based on the specified name
+ // of the graph in the pattern
+ if (streamGraphNames.contains(graphItem.getGraph().getName())) {
+ // if the graph name is specified as a stream graph in the parser input,
+ // the whole pattern is included into the set of items that depend on
+ // the input stream(s)
+ streamItems.add(graphItem);
+ } else {
+ // if the graph name is NOT specified as a stream graph, then it should
+ // first be checked that its expressions do not contain any SPARQL
+ // keyword (because these will become the context part which will be
+ // added as the consequence of the sensor query rule, so it cannot contain
+ // any SPARQL keywords)
+ String graphItemLowerCaseClause =
+ graphItem.getGraph().getClause().toLowerCase(Locale.ROOT);
+ boolean containsSparqlKeyword = POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS
+ .stream()
+ .anyMatch(graphItemLowerCaseClause::contains);
+ if (containsSparqlKeyword) {
+ throw new InvalidDivideQueryParserInputException(
+ "Non-streaming graph patterns of stream query cannot contain " +
+ "special SPARQL keywords - such expressions should " +
+ "be placed outside the graph");
+ }
+
+ // if no SPARQL keyword is present, the expressions in the graph pattern
+ // can be safely added to the context part of the stream query WHERE clause
+ contextPart.append(graphItem.getGraph().getClause()).append(" ");
+ }
+ }
+ }
+
+ return new ParsedStreamQueryWhereClause(
+ contextPart.toString().trim(), streamItems);
+ }
+
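+ // Illustrative example (hypothetical graphs): with stream graph names
+ // [<http://example.org/stream>], the item "GRAPH <http://example.org/stream>
+ // { ?obs a ex:Observation }" becomes a stream item, while a graph pattern on
+ // any other graph contributes its triple patterns to the context part (and
+ // may therefore not contain SPARQL keywords such as FILTER).
+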
+ /**
+ * Verifies and splits an individual expression item of the parsed WHERE clause of
+ * the stream query. The item is split into a single part per SPARQL keyword
+ * pattern. The verification checks that no expressions occur in this
+ * expression item (i.e., outside a graph) without an associated SPARQL keyword.
+ *
+ * @param expressionItem individual expression item of the parsed WHERE clause of
+ * the stream query
+ * @return a list of expression items originating from the original expression item,
+ * but split based on SPARQL keyword patterns
+ * @throws InvalidDivideQueryParserInputException if an expression occurs in this expression
+ * item (i.e., outside a graph) without an
+ * associated SPARQL keyword (these should be
+ * placed inside a graph pattern)
+ */
+ private List<WhereClauseExpressionItem> verifyAndSplitStreamQueryWhereClauseExpressionItemsBySparqlKeywords(
+ WhereClauseExpressionItem expressionItem) throws InvalidDivideQueryParserInputException {
+ List<WhereClauseExpressionItem> resultItems = new ArrayList<>();
+ String expressionLeftover = expressionItem.getExpression();
+
+ // scan the expression for special SPARQL patterns, i.e., parts that start
+ // with a SPARQL keyword followed by any characters up to the next keyword
+ // (so if multiple keywords occur, there will be multiple matches)
+ Matcher expressionMatcher = SPECIAL_SPARQL_PATTERN.matcher(
+ expressionItem.getExpression());
+ while (expressionMatcher.find()) {
+ String match = expressionMatcher.group();
+
+ // if the match involves a FILTER (NOT) EXISTS pattern, then the braces should
+ // be scanned to find the end of the pattern
+ // (instead of considering the end as the end of the pattern match)
+ if (match.matches("^FILTER\\s+(NOT\\s+)?EXISTS\\s+\\{.*")) {
+ // find end of FILTER (NOT) EXISTS pattern
+ int braceCount = 1;
+ int loopIndex = match.indexOf("{") + 1;
+ while (braceCount > 0) {
+ char c = expressionLeftover.charAt(loopIndex);
+ if (c == '{') {
+ braceCount++;
+ } else if (c == '}') {
+ braceCount--;
+ }
+ loopIndex++;
+ }
+
+ // update match to reach from start to this end
+ match = expressionLeftover.substring(0, loopIndex).trim();
+ }
+
+ // every match will be added as a separate WHERE clause expression item in the list
+ resultItems.add(new WhereClauseExpressionItem(match));
+ // this match is removed once from the original expression
+ expressionLeftover = expressionLeftover.replaceFirst(
+ Pattern.quote(match), "").trim();
+ // since the match can be made larger than the actual pattern match
+ // (for FILTER (NOT) EXISTS patterns), a new matcher is created
+ expressionMatcher = SPECIAL_SPARQL_PATTERN.matcher(expressionLeftover);
+ }
+
+ // if the original expression still contains text, this means that this part does
+ // not start with a known SPARQL keyword => in that case, this expression should
+ // be added to one of the graphs of the stream query WHERE clause
+ if (!expressionLeftover.isEmpty()) {
+ throw new InvalidDivideQueryParserInputException(
+ String.format("SPARQL pattern without known keyword found " +
+ "outside graph in stream query WHERE clause: %s", expressionLeftover));
+ }
+ return resultItems;
+ }
+
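+ // Illustrative example (hypothetical expression): "FILTER (?v > 10)
+ // BIND (?v * 2 AS ?w)" is split into two expression items, one per SPARQL
+ // keyword pattern; any leftover text without a known keyword (e.g. a bare
+ // triple pattern outside a graph) causes an exception.
+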
+ /**
+ * Retrieves the input variables to be specified in a DIVIDE sensor query rule.
+ * For this, it checks which variables occur in both the antecedent of the rule
+ * (i.e., the context-dependent part of the stream query) and the RSP-QL query
+ * body. These variables will be substituted into the RSP-QL query body after
+ * the DIVIDE query derivation.
+ *
+ * @param contextPartOfSensorQueryRule context part of sensor query rule, i.e.,
+ * its antecedent
+ * @param rspQlQueryBodyVariables unbound variables in RSP-QL query body that is
+ * referenced in the sensor query rule via the
+ * query pattern
+ * @return the unbound variables that occur both in the antecedent and consequence
+ * of the sensor query rule, and that should therefore be specified as
+ * input variables
+ */
+ private List<String> retrieveInputVariables(String contextPartOfSensorQueryRule,
+ Set<String> rspQlQueryBodyVariables) {
+ List<String> antecedentVariables =
+ findUnboundVariables(contextPartOfSensorQueryRule);
+
+ return antecedentVariables
+ .stream()
+ .filter(rspQlQueryBodyVariables::contains)
+ .collect(Collectors.toList());
+ }
+
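+ // Illustrative example (hypothetical variables): antecedent variables
+ // {?patient, ?room} and RSP-QL body variables {?patient, ?obs} yield the
+ // input variables [?patient].
+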
+ /**
+ * Retrieves the output variables to be specified in a DIVIDE sensor query rule.
+ * For this, it checks which variables occur in the output of the stream query
+ * (this ends up in the consequence of the sensor query rule) that do NOT occur
+ * in the context part of the sensor query rule; these variables are the output
+ * variables that should be replaced by a blank node in the sensor query rule.
+ *
+ * @param contextPartOfSensorQueryRule context part of sensor query rule, i.e.,
+ * its antecedent
+ * @param streamQueryResult output of the stream query
+ * @return the unbound variables that occur in the output of the stream query,
+ * and not in the context part of the sensor query rule
+ */
+ private List<String> retrieveOutputVariables(String contextPartOfSensorQueryRule,
+ String streamQueryResult) {
+ List<String> antecedentVariables =
+ findUnboundVariables(contextPartOfSensorQueryRule);
+ List<String> resultVariables = findUnboundVariables(streamQueryResult);
+
+ return resultVariables
+ .stream()
+ .filter(s -> !antecedentVariables.contains(s))
+ .collect(Collectors.toList());
+ }
+
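+ // Illustrative example (hypothetical variables): stream query output variables
+ // {?patient, ?alert} and antecedent variables {?patient} yield the output
+ // variables [?alert].
+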
+ /**
+ * @param queryPart any part of a SPARQL or RSP-QL query (can also be the full query body)
+ * @return a list of unbound variables that are present in the given query part
+ */
+ List<String> findUnboundVariables(String queryPart) {
+ Matcher matcher = UNBOUND_VARIABLES_PATTERN.matcher(queryPart);
+ Set