diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..9f11b75 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.idea/ diff --git a/src/divide-central/divide-api/pom.xml b/src/divide-central/divide-api/pom.xml new file mode 100644 index 0000000..dd79f9a --- /dev/null +++ b/src/divide-central/divide-api/pom.xml @@ -0,0 +1,55 @@ + + + + divide + be.ugent.idlab + 1.0 + + 4.0.0 + + divide-api + + + + maven-restlet + Public online Restlet repository + https://maven.restlet.org + + + + + + + be.ugent.idlab + divide-engine + 1.0 + + + + + org.restlet.jse + org.restlet + 2.3.6 + + + org.restlet.jee + org.restlet.ext.slf4j + 2.2.2 + + + slf4j-api + org.slf4j + + + + + + + com.google.code.gson + gson + 2.8.5 + + + \ No newline at end of file diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiApplication.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiApplication.java new file mode 100644 index 0000000..57cce05 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiApplication.java @@ -0,0 +1,65 @@ +package be.ugent.idlab.divide.api; + +import be.ugent.idlab.divide.api.endpoints.component.ComponentEndpoint; +import be.ugent.idlab.divide.api.endpoints.component.GeneralComponentEndpoint; +import be.ugent.idlab.divide.api.endpoints.query.DivideQueryEndpoint; +import be.ugent.idlab.divide.api.endpoints.query.DivideQueryRegistrationAsRspQlEndpoint; +import be.ugent.idlab.divide.api.endpoints.query.DivideQueryRegistrationAsSparqlEndpoint; +import be.ugent.idlab.divide.api.endpoints.query.GeneralDivideQueryEndpoint; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import org.restlet.Application; +import org.restlet.Restlet; +import org.restlet.routing.Router; +import org.restlet.routing.Template; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DivideApiApplication extends Application { + + private static final 
Logger LOGGER = LoggerFactory.getLogger(DivideApiApplication.class.getName()); + + public static final String ATTR_DIVIDE_ENGINE = "divide_engine"; + + private final IDivideEngine divideEngine; + + public DivideApiApplication(IDivideEngine divideEngine) { + this.divideEngine = divideEngine; + } + + @Override + public Restlet createInboundRoot() { + getContext().getAttributes().put(ATTR_DIVIDE_ENGINE, divideEngine); + + Router router = new Router(getContext()); + router.setDefaultMatchingMode(Template.MODE_EQUALS); + + router.attach(DivideRoutes.ENDPOINT_COMPONENT, ComponentEndpoint.class); + LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_COMPONENT); + ComponentEndpoint.logEndpoints(LOGGER); + + router.attach(DivideRoutes.ENDPOINT_COMPONENT_GENERAL, GeneralComponentEndpoint.class); + LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_COMPONENT_GENERAL); + GeneralComponentEndpoint.logEndpoints(LOGGER); + + router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY, DivideQueryEndpoint.class); + LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY); + DivideQueryEndpoint.logEndpoints(LOGGER); + + router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_SPARQL, + DivideQueryRegistrationAsSparqlEndpoint.class); + LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_SPARQL); + DivideQueryRegistrationAsSparqlEndpoint.logEndpoints(LOGGER); + + router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_RSP_QL, + DivideQueryRegistrationAsRspQlEndpoint.class); + LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY_REGISTER_AS_RSP_QL); + DivideQueryRegistrationAsRspQlEndpoint.logEndpoints(LOGGER); + + router.attach(DivideRoutes.ENDPOINT_DIVIDE_QUERY_GENERAL, GeneralDivideQueryEndpoint.class); + LOGGER.info("DIVIDE API endpoint {}", DivideRoutes.ENDPOINT_DIVIDE_QUERY_GENERAL); + GeneralDivideQueryEndpoint.logEndpoints(LOGGER); + + return router; + } + +} \ No newline at end of file diff 
--git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiComponentFactory.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiComponentFactory.java new file mode 100644 index 0000000..30c6a07 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideApiComponentFactory.java @@ -0,0 +1,58 @@ +package be.ugent.idlab.divide.api; + +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import org.restlet.Component; +import org.restlet.data.Protocol; + +@SuppressWarnings("unused") +public class DivideApiComponentFactory { + + /** + * Create a Restlet {@link Component} that can be started to host an API + * for the given DIVIDE engine. This DIVIDE API will be hosted via the + * HTTP protocol on the given host and port, on the root path, + * i.e., at http://[host]:[port]/. + * + * @param divideEngine DIVIDE engine that should be wrapped by the created + * API component + * @param host host at which the DIVIDE API should run + * @param port port at which the DIVIDE API should run + * @return a Restlet {@link Component} which can be started with the + * {@link Component#start()} method to host the DIVIDE API + */ + public static Component createRestApiComponent(IDivideEngine divideEngine, + String host, + int port) { + return createRestApiComponent(divideEngine, host, port, ""); + } + + /** + * Create a Restlet {@link Component} that can be started to host an API + * for the given DIVIDE engine. This DIVIDE API will be hosted via the + * HTTP protocol on the given host and port, on the specified uri path, + * i.e., at http://[host]:[port]/[uri]. 
+ * + * @param divideEngine DIVIDE engine that should be wrapped by the created + * API component + * @param host host at which the DIVIDE API should run + * @param port port at which the DIVIDE API should run + * @param uri path URI string at which the DIVIDE API should run + * @return a Restlet {@link Component} which can be started with the + * {@link Component#start()} method to host the DIVIDE API + */ + public static Component createRestApiComponent(IDivideEngine divideEngine, + String host, + int port, + String uri) { + // create Restlet component + Component component = new Component(); + component.getServers().add(Protocol.HTTP, host, port); + + // create and attach Servlet application + DivideApiApplication divideApiApplication = new DivideApiApplication(divideEngine); + component.getDefaultHost().attach(uri, divideApiApplication); + + return component; + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideRoutes.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideRoutes.java new file mode 100644 index 0000000..93b49ec --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/DivideRoutes.java @@ -0,0 +1,24 @@ +package be.ugent.idlab.divide.api; + +import static be.ugent.idlab.divide.api.endpoints.CustomEndpoint.SERVER_ATTR_ID; + +class DivideRoutes { + + private static final String COMPONENT_ENTITY = "component"; + private static final String QUERY_ENTITY = "query"; + + static final String ENDPOINT_COMPONENT_GENERAL = + "/" + COMPONENT_ENTITY; + static final String ENDPOINT_COMPONENT = + "/" + COMPONENT_ENTITY + "/{" + SERVER_ATTR_ID + "}"; + + static final String ENDPOINT_DIVIDE_QUERY_GENERAL = + "/" + QUERY_ENTITY; + static final String ENDPOINT_DIVIDE_QUERY = + "/" + QUERY_ENTITY + "/{" + SERVER_ATTR_ID + "}"; + static final String ENDPOINT_DIVIDE_QUERY_REGISTER_AS_SPARQL = + "/" + QUERY_ENTITY + "/sparql/{" + SERVER_ATTR_ID + "}"; + static final 
String ENDPOINT_DIVIDE_QUERY_REGISTER_AS_RSP_QL = + "/" + QUERY_ENTITY + "/rspql/{" + SERVER_ATTR_ID + "}"; + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/CustomEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/CustomEndpoint.java new file mode 100644 index 0000000..b9cc7a7 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/CustomEndpoint.java @@ -0,0 +1,20 @@ +package be.ugent.idlab.divide.api.endpoints; + +import be.ugent.idlab.divide.api.DivideApiApplication; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import org.restlet.resource.ServerResource; + +public abstract class CustomEndpoint extends ServerResource { + + public static final String SERVER_ATTR_ID = "id"; + + protected IDivideEngine getDivideEngine() { + return (IDivideEngine) getContext().getAttributes().get( + DivideApiApplication.ATTR_DIVIDE_ENGINE); + } + + protected String getIdAttribute() { + return (String) getRequest().getAttributes().get(SERVER_ATTR_ID); + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/ComponentEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/ComponentEndpoint.java new file mode 100644 index 0000000..bb24b4c --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/ComponentEndpoint.java @@ -0,0 +1,139 @@ +package be.ugent.idlab.divide.api.endpoints.component; + +import be.ugent.idlab.divide.api.endpoints.CustomEndpoint; +import be.ugent.idlab.divide.api.representation.component.ComponentRepresentation; +import be.ugent.idlab.divide.core.component.IComponent; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import com.google.gson.Gson; +import 
com.google.gson.GsonBuilder; +import org.restlet.data.MediaType; +import org.restlet.data.Method; +import org.restlet.data.Status; +import org.restlet.representation.Representation; +import org.restlet.resource.Delete; +import org.restlet.resource.Get; +import org.restlet.resource.Options; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashSet; +import java.util.Set; + +public class ComponentEndpoint extends CustomEndpoint { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + private static final Logger LOGGER = LoggerFactory.getLogger(ComponentEndpoint.class.getName()); + + @Options + public void optionsRequestHandler() { + Set allowedMethods = new HashSet<>(); + allowedMethods.add(Method.GET); + allowedMethods.add(Method.DELETE); + getResponse().setAccessControlAllowMethods(allowedMethods); + getResponse().setAccessControlAllowOrigin("*"); + } + + public static void logEndpoints(Logger logger) { + logger.info(" GET: retrieve DIVIDE component with ID {}", SERVER_ATTR_ID); + logger.info(" DELETE: unregister DIVIDE component with ID {}", SERVER_ATTR_ID); + } + + @Get + public void getComponent() { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String componentId = getIdAttribute(); + + IComponent component = divideEngine.getRegisteredComponentById(componentId); + + if (component != null) { + ComponentRepresentation componentRepresentation = + new ComponentRepresentation(component); + + String message = "Component with ID " + componentId + " successfully retrieved"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity(GSON.toJson(componentRepresentation), + MediaType.APPLICATION_JSON); + + } else { + String message = "Component with ID '" + componentId + "' does not exist"; + getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } 
catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while getting component data"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? ": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + + @Delete + public void unregisterComponent(Representation rep) { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String componentId = getIdAttribute(); + + IComponent component = divideEngine.getRegisteredComponentById(componentId); + + if (component != null) { + // retrieve url parameter which specifies whether the queries of this + // component should be unregistered + // (default when it is not specified = false) + boolean unregisterQueries = + Boolean.parseBoolean(getQueryValue("unregister")); + + divideEngine.unregisterComponent(componentId, unregisterQueries); + + String message = "Component with ID " + componentId + " successfully unregistered"; + getResponse().setStatus(Status.SUCCESS_NO_CONTENT, message); + + } else { + String message = "Component with ID '" + componentId + "' does not exist"; + getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while 
unregistering component"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? ": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/GeneralComponentEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/GeneralComponentEndpoint.java new file mode 100644 index 0000000..5879e6e --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/component/GeneralComponentEndpoint.java @@ -0,0 +1,160 @@ +package be.ugent.idlab.divide.api.endpoints.component; + +import be.ugent.idlab.divide.api.endpoints.CustomEndpoint; +import be.ugent.idlab.divide.api.representation.component.ComponentRepresentation; +import be.ugent.idlab.divide.core.component.IComponent; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.util.component.ComponentEntry; +import be.ugent.idlab.divide.util.component.ComponentEntryParserException; +import be.ugent.idlab.divide.util.component.JsonComponentEntryParser; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.restlet.data.MediaType; +import org.restlet.data.Method; +import org.restlet.data.Status; +import org.restlet.representation.Representation; +import org.restlet.resource.Get; +import org.restlet.resource.Options; +import org.restlet.resource.Post; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; 
+ +public class GeneralComponentEndpoint extends CustomEndpoint { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + private static final Logger LOGGER = LoggerFactory.getLogger(GeneralComponentEndpoint.class.getName()); + + @Options + public void optionsRequestHandler() { + Set allowedMethods = new HashSet<>(); + allowedMethods.add(Method.GET); + allowedMethods.add(Method.POST); + getResponse().setAccessControlAllowMethods(allowedMethods); + getResponse().setAccessControlAllowOrigin("*"); + } + + public static void logEndpoints(Logger logger) { + logger.info(" GET: retrieve all registered DIVIDE components"); + logger.info(" POST: register a new DIVIDE component (description in HTTP body)"); + } + + @Get + public void getComponents() { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + Collection components = new ArrayList<>(); + + for (IComponent component : divideEngine.getRegisteredComponents()) { + components.add(new ComponentRepresentation(component)); + } + + String message = "Components successfully retrieved"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity(GSON.toJson(components), MediaType.APPLICATION_JSON); + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while getting component data"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? 
": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + + @Post + public void registerComponent(Representation rep) { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + if (rep != null) { + String componentEntryString = rep.getText(); + + // parse component entry + ComponentEntry componentEntry = + JsonComponentEntryParser.parseComponentEntry(componentEntryString); + + // register component + IComponent component = divideEngine.registerComponent( + new ArrayList<>(componentEntry.getContextIris()), + componentEntry.getRspQueryLanguage(), + componentEntry.getRspEngineUrl()); + + if (component != null) { + String message = "Component with ID " + component.getId() + + " successfully registered"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity( + GSON.toJson(new ComponentRepresentation(component)), + MediaType.APPLICATION_JSON); + + } else { + String message = "Component with the specified host, port and path of the " + + "RSP engine URL already exists"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } else { + String message = "No component entry information specified"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (ComponentEntryParserException e) { + String message = String.format("Component entry information invalid: %s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideInvalidInputException e) { + String message = String.format("Component input invalid: 
%s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while registering component"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? ": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryEndpoint.java new file mode 100644 index 0000000..e3d3112 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryEndpoint.java @@ -0,0 +1,264 @@ +package be.ugent.idlab.divide.api.endpoints.query; + +import be.ugent.idlab.divide.api.endpoints.CustomEndpoint; +import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation; +import be.ugent.idlab.divide.core.context.ContextEnrichment; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.util.query.ContextEnrichmentEntry; +import be.ugent.idlab.divide.util.query.DivideQueryEntryInDivideFormat; +import 
be.ugent.idlab.divide.util.query.DivideQueryEntryParser; +import be.ugent.idlab.util.io.IOUtilities; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.restlet.data.MediaType; +import org.restlet.data.Method; +import org.restlet.data.Status; +import org.restlet.representation.Representation; +import org.restlet.resource.Delete; +import org.restlet.resource.Get; +import org.restlet.resource.Options; +import org.restlet.resource.Post; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashSet; +import java.util.Set; + +public class DivideQueryEndpoint extends CustomEndpoint { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + private static final Logger LOGGER = LoggerFactory.getLogger(DivideQueryEndpoint.class.getName()); + + @Options + public void optionsRequestHandler() { + Set allowedMethods = new HashSet<>(); + allowedMethods.add(Method.GET); + allowedMethods.add(Method.DELETE); + allowedMethods.add(Method.POST); + getResponse().setAccessControlAllowMethods(allowedMethods); + getResponse().setAccessControlAllowOrigin("*"); + } + + public static void logEndpoints(Logger logger) { + logger.info(" GET: retrieve DIVIDE query with ID {}", SERVER_ATTR_ID); + logger.info(" POST: register DIVIDE query with ID {}" + + " (JSON description of DIVIDE query inputs in HTTP body)", SERVER_ATTR_ID); + logger.info(" DELETE: unregister DIVIDE query with ID {}", SERVER_ATTR_ID); + } + + @Get + public void getDivideQuery() { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String divideQueryName = getIdAttribute(); + + IDivideQuery divideQuery = divideEngine.getDivideQueryByName(divideQueryName); + + if (divideQuery != null) { + DivideQueryRepresentation divideQueryRepresentation = + new DivideQueryRepresentation(divideQuery); + + String message = "DIVIDE query with name '" + divideQueryName + "' successfully retrieved"; + 
getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity(GSON.toJson(divideQueryRepresentation), + MediaType.APPLICATION_JSON); + + } else { + String message = "DIVIDE query with name '" + divideQueryName + "' does not exist"; + getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while getting DIVIDE query data"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? ": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + + @Post + public void addDivideQuery(Representation rep) { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String divideQueryName = getIdAttribute(); + + if (divideEngine.getDivideQueryByName(divideQueryName) == null) { + + if (rep != null) { + String divideQueryJson = rep.getText(); + + DivideQueryEntryInDivideFormat divideQueryEntry; + String queryPattern; + String sensorQueryRule; + String goal; + try { + // parse DIVIDE query JSON + divideQueryEntry = DivideQueryEntryParser.parseDivideQueryEntryInDivideFormat(divideQueryJson); + queryPattern = divideQueryEntry.getQueryPattern(); + sensorQueryRule = divideQueryEntry.getSensorQueryRule(); + goal = divideQueryEntry.getGoal(); + + } catch (Exception e) { + String message = "Specified DIVIDE query information is no valid JSON"; + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, 
message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + return; + } + + // ensure all required information is provided + if (queryPattern != null && !queryPattern.isEmpty() && + sensorQueryRule != null && !sensorQueryRule.isEmpty() && + goal != null && !goal.isEmpty()) { + + // create context enrichment + ContextEnrichmentEntry contextEnrichmentEntry = + divideQueryEntry.getContextEnrichment(); + ContextEnrichment contextEnrichment; + if (contextEnrichmentEntry == null + || contextEnrichmentEntry.getQueries() == null + || contextEnrichmentEntry.getQueries().isEmpty()) { + contextEnrichment = new ContextEnrichment(); + } else { + contextEnrichment = new ContextEnrichment( + contextEnrichmentEntry.doReasoning(), + contextEnrichmentEntry.executeOnOntologyTriples(), + contextEnrichmentEntry.getQueries()); + } + + // add query to DIVIDE engine + // (response cannot be null since it was checked before whether + // query with this name already exists) + IDivideQuery divideQuery = divideEngine.addDivideQuery( + divideQueryName, + IOUtilities.removeWhiteSpace(queryPattern).replaceAll("\r", " "), + IOUtilities.removeWhiteSpace(sensorQueryRule).replaceAll("\r", " "), + IOUtilities.removeWhiteSpace(goal).replaceAll("\r", " "), + contextEnrichment); + + String message = "DIVIDE query with name '" + divideQueryName + + "' successfully registered"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity( + GSON.toJson(new DivideQueryRepresentation(divideQuery)), + MediaType.APPLICATION_JSON); + + } else { + String message = "Not all required DIVIDE query JSON information " + + "is specified and non-empty"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } else { + String message = "No DIVIDE query JSON information specified"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + 
+ } else { + String message = "DIVIDE query with the specified name already exists"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (DivideInvalidInputException e) { + String message = String.format("Query input invalid: %s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while adding DIVIDE query"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? ": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + + @Delete + public void removeDivideQuery(Representation rep) { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String divideQueryName = getIdAttribute(); + + IDivideQuery divideQuery = divideEngine.getDivideQueryByName(divideQueryName); + + if (divideQuery != null) { + // retrieve url parameter which specifies whether the queries of this + // DIVIDE query should be unregistered + // (default when it is not specified = true) + boolean unregisterQueries = !"false".equals(getQueryValue("unregister")); + + divideEngine.removeDivideQuery(divideQueryName, unregisterQueries); + + String message = "DIVIDE query with name " + divideQueryName + " successfully unregistered"; + getResponse().setStatus(Status.SUCCESS_NO_CONTENT, message); + + } else { + String 
message = "DIVIDE query with name '" + divideQueryName + "' does not exist"; + getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while removing DIVIDE query"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? ": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsRspQlEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsRspQlEndpoint.java new file mode 100644 index 0000000..7c8dbb9 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsRspQlEndpoint.java @@ -0,0 +1,166 @@ +package be.ugent.idlab.divide.api.endpoints.query; + +import be.ugent.idlab.divide.api.endpoints.CustomEndpoint; +import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation; +import be.ugent.idlab.divide.core.context.ContextEnrichment; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput; +import 
be.ugent.idlab.divide.core.query.parser.DivideQueryParserOutput; +import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException; +import be.ugent.idlab.divide.util.query.ContextEnrichmentEntry; +import be.ugent.idlab.divide.util.query.DivideQueryEntryInQueryFormat; +import be.ugent.idlab.divide.util.query.DivideQueryEntryParser; +import be.ugent.idlab.util.io.IOUtilities; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.restlet.data.MediaType; +import org.restlet.data.Method; +import org.restlet.data.Status; +import org.restlet.representation.Representation; +import org.restlet.resource.Options; +import org.restlet.resource.Post; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashSet; +import java.util.Set; + +public class DivideQueryRegistrationAsRspQlEndpoint extends CustomEndpoint { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + private static final Logger LOGGER = + LoggerFactory.getLogger(DivideQueryRegistrationAsRspQlEndpoint.class.getName()); + + @Options + public void optionsRequestHandler() { + Set allowedMethods = new HashSet<>(); + allowedMethods.add(Method.POST); + getResponse().setAccessControlAllowMethods(allowedMethods); + getResponse().setAccessControlAllowOrigin("*"); + } + + public static void logEndpoints(Logger logger) { + logger.info(" POST: register DIVIDE query with ID {}" + + " (JSON description of RSP-QL input in HTTP body)", SERVER_ATTR_ID); + } + + @Post + public void addDivideQuery(Representation rep) { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String divideQueryName = getIdAttribute(); + + if (divideEngine.getDivideQueryByName(divideQueryName) == null) { + + String divideQueryJson; + if (rep != null && (divideQueryJson = rep.getText()) != null + && !divideQueryJson.trim().isEmpty()) { + + // parse JSON entry to real DIVIDE query parser & 
context enrichment input + DivideQueryEntryInQueryFormat divideQueryEntryInQueryFormat = + DivideQueryEntryParser.parseRspQlEntryAsDivideQuery(divideQueryJson); + DivideQueryParserInput divideQueryParserInput = + divideQueryEntryInQueryFormat.getDivideQueryParserInput(); + ContextEnrichmentEntry contextEnrichmentEntry = + divideQueryEntryInQueryFormat.getContextEnrichmentEntry(); + + // parse RSP-QL input to actual DIVIDE query inputs + DivideQueryParserOutput divideQueryParserOutput = + divideEngine.getQueryParser(). + parseDivideQuery(divideQueryParserInput); + + // ensure all required information is provided + if (divideQueryParserOutput.isNonEmpty()) { + + // create context enrichment + ContextEnrichment contextEnrichment; + if (contextEnrichmentEntry == null + || contextEnrichmentEntry.getQueries() == null + || contextEnrichmentEntry.getQueries().isEmpty()) { + contextEnrichment = new ContextEnrichment(); + } else { + contextEnrichment = new ContextEnrichment( + contextEnrichmentEntry.doReasoning(), + contextEnrichmentEntry.executeOnOntologyTriples(), + contextEnrichmentEntry.getQueries()); + } + + // add query to DIVIDE engine + // (response cannot be null since it was checked before whether + // query with this name already exists) + IDivideQuery divideQuery = divideEngine.addDivideQuery( + divideQueryName, + IOUtilities.removeWhiteSpace( + divideQueryParserOutput.getQueryPattern()).replaceAll("\r", " "), + IOUtilities.removeWhiteSpace( + divideQueryParserOutput.getSensorQueryRule()).replaceAll("\r", " "), + IOUtilities.removeWhiteSpace( + divideQueryParserOutput.getGoal()).replaceAll("\r", " "), + contextEnrichment); + + String message = "DIVIDE query with name '" + divideQueryName + + "' successfully registered"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity( + GSON.toJson(new DivideQueryRepresentation(divideQuery)), + MediaType.APPLICATION_JSON); + + } else { + String message = "Input leads to empty DIVIDE query fields"; 
+ getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } else { + String message = "No DIVIDE query JSON information specified"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } else { + String message = "DIVIDE query with the specified name already exists"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (InvalidDivideQueryParserInputException e) { + String message = String.format("JSON representing RSP-QL query " + + "input is invalid: %s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideInvalidInputException e) { + String message = String.format("Query input invalid: %s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while adding DIVIDE query"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? 
": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsSparqlEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsSparqlEndpoint.java new file mode 100644 index 0000000..4cd0fa0 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/DivideQueryRegistrationAsSparqlEndpoint.java @@ -0,0 +1,166 @@ +package be.ugent.idlab.divide.api.endpoints.query; + +import be.ugent.idlab.divide.api.endpoints.CustomEndpoint; +import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation; +import be.ugent.idlab.divide.core.context.ContextEnrichment; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput; +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserOutput; +import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException; +import be.ugent.idlab.divide.util.query.ContextEnrichmentEntry; +import be.ugent.idlab.divide.util.query.DivideQueryEntryInQueryFormat; +import be.ugent.idlab.divide.util.query.DivideQueryEntryParser; +import be.ugent.idlab.util.io.IOUtilities; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.restlet.data.MediaType; +import org.restlet.data.Method; +import org.restlet.data.Status; +import org.restlet.representation.Representation; +import 
org.restlet.resource.Options; +import org.restlet.resource.Post; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashSet; +import java.util.Set; + +public class DivideQueryRegistrationAsSparqlEndpoint extends CustomEndpoint { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + private static final Logger LOGGER = + LoggerFactory.getLogger(DivideQueryRegistrationAsSparqlEndpoint.class.getName()); + + @Options + public void optionsRequestHandler() { + Set allowedMethods = new HashSet<>(); + allowedMethods.add(Method.POST); + getResponse().setAccessControlAllowMethods(allowedMethods); + getResponse().setAccessControlAllowOrigin("*"); + } + + public static void logEndpoints(Logger logger) { + logger.info(" POST: register DIVIDE query with ID {}" + + " (JSON description of SPARQL inputs in HTTP body)", SERVER_ATTR_ID); + } + + @Post + public void addDivideQuery(Representation rep) { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + String divideQueryName = getIdAttribute(); + + if (divideEngine.getDivideQueryByName(divideQueryName) == null) { + + String divideQueryJson; + if (rep != null && (divideQueryJson = rep.getText()) != null + && !divideQueryJson.trim().isEmpty()) { + + // parse JSON entry to real DIVIDE query parser & context enrichment input + DivideQueryEntryInQueryFormat divideQueryEntryInQueryFormat = + DivideQueryEntryParser.parseSparqlEntryAsDivideQuery(divideQueryJson); + DivideQueryParserInput divideQueryParserInput = + divideQueryEntryInQueryFormat.getDivideQueryParserInput(); + ContextEnrichmentEntry contextEnrichmentEntry = + divideQueryEntryInQueryFormat.getContextEnrichmentEntry(); + + // parse SPARQL input to actual DIVIDE query inputs + DivideQueryParserOutput divideQueryParserOutput = + divideEngine.getQueryParser(). 
+ parseDivideQuery(divideQueryParserInput); + + // ensure all required information is provided + if (divideQueryParserOutput.isNonEmpty()) { + + // create context enrichment + ContextEnrichment contextEnrichment; + if (contextEnrichmentEntry == null + || contextEnrichmentEntry.getQueries() == null + || contextEnrichmentEntry.getQueries().isEmpty()) { + contextEnrichment = new ContextEnrichment(); + } else { + contextEnrichment = new ContextEnrichment( + contextEnrichmentEntry.doReasoning(), + contextEnrichmentEntry.executeOnOntologyTriples(), + contextEnrichmentEntry.getQueries()); + } + + // add query to DIVIDE engine + // (response cannot be null since it was checked before whether + // query with this name already exists) + IDivideQuery divideQuery = divideEngine.addDivideQuery( + divideQueryName, + IOUtilities.removeWhiteSpace( + divideQueryParserOutput.getQueryPattern()).replaceAll("\r", " "), + IOUtilities.removeWhiteSpace( + divideQueryParserOutput.getSensorQueryRule()).replaceAll("\r", " "), + IOUtilities.removeWhiteSpace( + divideQueryParserOutput.getGoal()).replaceAll("\r", " "), + contextEnrichment); + + String message = "DIVIDE query with name '" + divideQueryName + + "' successfully registered"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity( + GSON.toJson(new DivideQueryRepresentation(divideQuery)), + MediaType.APPLICATION_JSON); + + } else { + String message = "Input leads to empty DIVIDE query fields"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } else { + String message = "No DIVIDE query JSON information specified"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } else { + String message = "DIVIDE query with the specified name already exists"; + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + 
getResponse().setEntity(message, MediaType.TEXT_PLAIN); + } + + } catch (InvalidDivideQueryParserInputException e) { + String message = String.format("JSON representing SPARQL query " + + "input is invalid: %s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideInvalidInputException e) { + String message = String.format("Query input invalid: %s", e.getMessage()); + LOGGER.error(message, e); + getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while adding DIVIDE query"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? 
": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/GeneralDivideQueryEndpoint.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/GeneralDivideQueryEndpoint.java new file mode 100644 index 0000000..e453691 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/endpoints/query/GeneralDivideQueryEndpoint.java @@ -0,0 +1,78 @@ +package be.ugent.idlab.divide.api.endpoints.query; + +import be.ugent.idlab.divide.api.endpoints.CustomEndpoint; +import be.ugent.idlab.divide.api.representation.query.DivideQueryRepresentation; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.restlet.data.MediaType; +import org.restlet.data.Method; +import org.restlet.data.Status; +import org.restlet.resource.Get; +import org.restlet.resource.Options; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; + +public class GeneralDivideQueryEndpoint extends CustomEndpoint { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + private static final Logger LOGGER = LoggerFactory.getLogger(GeneralDivideQueryEndpoint.class.getName()); + + @Options + public void optionsRequestHandler() { + Set allowedMethods = new HashSet<>(); + allowedMethods.add(Method.GET); + getResponse().setAccessControlAllowMethods(allowedMethods); + 
getResponse().setAccessControlAllowOrigin("*"); + } + + public static void logEndpoints(Logger logger) { + logger.info(" GET: retrieve all registered DIVIDE queries"); + } + + @Get + public void getQueries() { + getResponse().setAccessControlAllowOrigin("*"); + + IDivideEngine divideEngine = getDivideEngine(); + + try { + Collection components = new ArrayList<>(); + + for (IDivideQuery divideQuery : divideEngine.getDivideQueries()) { + components.add(new DivideQueryRepresentation(divideQuery)); + } + + String message = "DIVIDE queries successfully retrieved"; + getResponse().setStatus(Status.SUCCESS_OK, message); + getResponse().setEntity(GSON.toJson(components), MediaType.APPLICATION_JSON); + + } catch (DivideNotInitializedException e) { + String message = e.getMessage(); + LOGGER.error(message, e); + getResponse().setStatus(Status.SERVER_ERROR_SERVICE_UNAVAILABLE, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } catch (Exception e) { + String logMessage = "Error while retrieving DIVIDE queries"; + String eMessage = e.getMessage(); + String message = logMessage + (eMessage != null ? 
": " + eMessage : ""); + LOGGER.error(logMessage, e); + getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, message); + getResponse().setEntity(message, MediaType.TEXT_PLAIN); + + } finally { + getResponse().commit(); + commit(); + release(); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/ComponentRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/ComponentRepresentation.java new file mode 100644 index 0000000..c90309f --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/ComponentRepresentation.java @@ -0,0 +1,21 @@ +package be.ugent.idlab.divide.api.representation.component; + +import be.ugent.idlab.divide.core.component.IComponent; + +import java.util.List; + +@SuppressWarnings({"FieldCanBeLocal", "unused"}) +public class ComponentRepresentation { + + private final String id; + private final List contextIris; + private final RspEngineRepresentation rspEngine; + + public ComponentRepresentation(IComponent component) { + this.id = component.getId(); + this.contextIris = component.getContextIris(); + this.rspEngine = new RspEngineRepresentation( + component.getRspEngineHandler().getRspEngine()); + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspEngineRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspEngineRepresentation.java new file mode 100644 index 0000000..f1e896a --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspEngineRepresentation.java @@ -0,0 +1,27 @@ +package be.ugent.idlab.divide.api.representation.component; + + +import be.ugent.idlab.divide.rsp.engine.IRspEngine; +import be.ugent.idlab.divide.rsp.query.IRspQuery; + +import java.util.ArrayList; 
+import java.util.List; + +@SuppressWarnings({"FieldCanBeLocal", "unused", "WeakerAccess", "MismatchedQueryAndUpdateOfCollection"}) +public class RspEngineRepresentation { + + private final String queryLanguage; + private final String url; + private final List registeredQueries; + + public RspEngineRepresentation(IRspEngine engine) { + this.queryLanguage = engine.getRspQueryLanguage().toString().toLowerCase(); + this.url = engine.getBaseUrl(); + + this.registeredQueries = new ArrayList<>(); + for (IRspQuery rspQuery : engine.getRegisteredQueries()) { + registeredQueries.add(new RspQueryRepresentation(rspQuery)); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspQueryRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspQueryRepresentation.java new file mode 100644 index 0000000..1016d65 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/component/RspQueryRepresentation.java @@ -0,0 +1,16 @@ +package be.ugent.idlab.divide.api.representation.component; + +import be.ugent.idlab.divide.rsp.query.IRspQuery; + +@SuppressWarnings({"FieldCanBeLocal", "unused", "WeakerAccess"}) +public class RspQueryRepresentation { + + private final String queryName; + private final String queryBody; + + public RspQueryRepresentation(IRspQuery query) { + this.queryName = query.getQueryName(); + this.queryBody = query.getQueryBody(); + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/ContextEnrichmentRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/ContextEnrichmentRepresentation.java new file mode 100644 index 0000000..c626e28 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/ContextEnrichmentRepresentation.java @@ 
-0,0 +1,44 @@ +package be.ugent.idlab.divide.api.representation.query; + +import be.ugent.idlab.divide.core.context.ContextEnrichingQuery; +import be.ugent.idlab.divide.core.context.ContextEnrichment; + +import java.util.List; +import java.util.stream.Collectors; + +@SuppressWarnings({"FieldCanBeLocal", "unused", "WeakerAccess"}) +public class ContextEnrichmentRepresentation { + + private final boolean doReasoning; + private final boolean executeOnOntologyTriples; + private final List queries; + + public ContextEnrichmentRepresentation(ContextEnrichment contextEnrichment) { + switch(contextEnrichment.getMode()) { + case EXECUTE_ON_CONTEXT_WITHOUT_REASONING: + this.doReasoning = false; + this.executeOnOntologyTriples = false; + + break; + case EXECUTE_ON_CONTEXT_WITH_REASONING: + this.doReasoning = true; + this.executeOnOntologyTriples = false; + + break; + case EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING: + this.doReasoning = false; + this.executeOnOntologyTriples = true; + + break; + case EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING: + default: + this.doReasoning = true; + this.executeOnOntologyTriples = true; + } + + this.queries = contextEnrichment.getQueries().stream() + .map(ContextEnrichingQuery::getQuery) + .collect(Collectors.toList()); + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/DivideQueryRepresentation.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/DivideQueryRepresentation.java new file mode 100644 index 0000000..aedaec8 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/api/representation/query/DivideQueryRepresentation.java @@ -0,0 +1,23 @@ +package be.ugent.idlab.divide.api.representation.query; + +import be.ugent.idlab.divide.core.query.IDivideQuery; + +@SuppressWarnings({"FieldCanBeLocal", "unused"}) +public class DivideQueryRepresentation { + + private final String name; + private final 
String queryPattern; + private final String sensorQueryRule; + private final String goal; + private final ContextEnrichmentRepresentation contextEnrichment; + + public DivideQueryRepresentation(IDivideQuery divideQuery) { + this.name = divideQuery.getName(); + this.queryPattern = divideQuery.getQueryPattern(); + this.sensorQueryRule = divideQuery.getSensorQueryRule(); + this.goal = divideQuery.getGoal(); + this.contextEnrichment = new ContextEnrichmentRepresentation( + divideQuery.getContextEnrichment()); + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntry.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntry.java new file mode 100644 index 0000000..ae43740 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntry.java @@ -0,0 +1,33 @@ +package be.ugent.idlab.divide.util.component; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; + +import java.util.List; + +public class ComponentEntry { + + private final List contextIris; + private final RspQueryLanguage rspQueryLanguage; + private final String rspEngineUrl; + + public ComponentEntry(List contextIris, + RspQueryLanguage rspQueryLanguage, + String rspEngineUrl) { + this.contextIris = contextIris; + this.rspQueryLanguage = rspQueryLanguage; + this.rspEngineUrl = rspEngineUrl; + } + + public List getContextIris() { + return contextIris; + } + + public RspQueryLanguage getRspQueryLanguage() { + return rspQueryLanguage; + } + + public String getRspEngineUrl() { + return rspEngineUrl; + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParser.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParser.java new file mode 100644 index 0000000..e1dd4f7 --- /dev/null +++ 
b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParser.java @@ -0,0 +1,48 @@ +package be.ugent.idlab.divide.util.component; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.List; + +public class ComponentEntryParser { + + static void validateContextIris(List contextIris) + throws ComponentEntryParserException{ + for (String contextIri : contextIris) { + if (contextIri == null || contextIri.trim().isEmpty()) { + throw new ComponentEntryParserException( + "Component entry contains empty context IRIs"); + } + } + } + + static RspQueryLanguage parseRspEngineQueryLanguage(String input) + throws ComponentEntryParserException { + RspQueryLanguage rspQueryLanguage = RspQueryLanguage.fromString(input.trim()); + if (rspQueryLanguage == null) { + throw new ComponentEntryParserException(String.format( + "Component entry contains invalid/unsupported RSP query language '%s'", + input)); + } + return rspQueryLanguage; + } + + static void validateRspEngineUrl(String rspEngineUrl) + throws ComponentEntryParserException { + try { + URL url = new URL(rspEngineUrl); + if (!url.getProtocol().equals("http") && !url.getProtocol().equals("https")) { + throw new ComponentEntryParserException(String.format( + "Component entry contains non HTTP(S) RSP engine URL '%s'", + rspEngineUrl)); + } + } catch (MalformedURLException e) { + throw new ComponentEntryParserException(String.format( + "Component entry contains invalid RSP engine URL '%s'", + rspEngineUrl)); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParserException.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/ComponentEntryParserException.java new file mode 100644 index 0000000..e1fe029 --- /dev/null +++ 
/**
 * Exception thrown when a DIVIDE component entry (from a CSV file or a JSON
 * HTTP body) cannot be parsed or fails validation.
 */
@SuppressWarnings("unused")
public class ComponentEntryParserException extends Exception {

    /** Creates an exception with a descriptive message and an underlying cause. */
    public ComponentEntryParserException(String description, Exception base) {
        super(description, base);
    }

    /** Creates an exception with a descriptive message only. */
    public ComponentEntryParserException(String description) {
        super(description);
    }

    /** Creates an exception wrapping an underlying cause. */
    public ComponentEntryParserException(Exception base) {
        super(base);
    }

}
+ * + * @param csvFile path to CSV file containing component configurations + * @return a list of parsed component entries of which the gettable fields can + * directly be used as input for the registration of components in a + * DIVIDE engine using the {@link IDivideEngine#registerComponent( + * List, RspQueryLanguage, String)} method + * @throws ComponentEntryParserException if a component configuration in the CSV file is + * invalid (invalid list of additional context IRIs, + * invalid RSP engine URL, or invalid + * RSP query language) + * @throws IllegalArgumentException if CSV file does not exist or is empty + */ + public static List parseComponentEntryFile(String csvFile) + throws ComponentEntryParserException { + List componentEntries = new ArrayList<>(); + + // read CSV file + List componentEntryStrings = IOUtilities.readCsvFile(csvFile, DELIMITER); + if (componentEntryStrings.isEmpty()) { + throw new IllegalArgumentException("CSV file does not exist or is empty"); + } + + // parse component entries in CSV file + for (String[] componentEntryString : componentEntryStrings) { + componentEntries.add(parseComponentEntry(componentEntryString)); + } + + return componentEntries; + } + + private static ComponentEntry parseComponentEntry(String[] entry) + throws ComponentEntryParserException { + if (entry.length == 4) { + // retrieve main context IRI + String mainContextIri = entry[0].trim(); + + // convert array string to actual array of additional context IRIs + if (!entry[1].trim().matches("\\[[^\\[\\]]+]")) { + throw new ComponentEntryParserException( + "Component entry contains invalid list of additional IRIs"); + } + List contextIris = new ArrayList<>(); + contextIris.add(mainContextIri); + if (!entry[1].replace(" ", "").replace("\t", "").trim().equals("[]")) { + contextIris.addAll( + Arrays.stream(entry[1].replace("[", "").replace("]", "").split(",")) + .map(String::trim) + .collect(Collectors.toList())); + } + 
ComponentEntryParser.validateContextIris(contextIris); + + // parse RSP query language + RspQueryLanguage rspQueryLanguage = + ComponentEntryParser.parseRspEngineQueryLanguage(entry[2]); + + // parse RSP engine URL + String rspEngineUrl = entry[3].trim(); + ComponentEntryParser.validateRspEngineUrl(rspEngineUrl); + + // if no errors, then return new component entry + return new ComponentEntry( + contextIris, rspQueryLanguage, rspEngineUrl); + + } else { + throw new ComponentEntryParserException( + "Component entry does not contain 4 elements"); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntry.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntry.java new file mode 100644 index 0000000..24edb75 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/component/JsonComponentEntry.java @@ -0,0 +1,67 @@ +package be.ugent.idlab.divide.util.component; + +import java.util.List; + +@SuppressWarnings("unused") +public class JsonComponentEntry { + + private List contextIris; + private RspEngineEntry rspEngine; + + public JsonComponentEntry(List contextIris, RspEngineEntry rspEngine) { + this.contextIris = contextIris; + this.rspEngine = rspEngine; + } + + public List getContextIris() { + return contextIris; + } + + public void setContextIris(List contextIris) { + this.contextIris = contextIris; + } + + public RspEngineEntry getRspEngine() { + return rspEngine; + } + + public void setRspEngine(RspEngineEntry rspEngine) { + this.rspEngine = rspEngine; + } + + public boolean validateIfNonNull() { + return contextIris != null && + rspEngine != null && + rspEngine.queryLanguage != null && + rspEngine.url != null; + } + + static class RspEngineEntry { + + public RspEngineEntry(String queryLanguage, String url) { + this.queryLanguage = queryLanguage; + this.url = url; + } + + private String queryLanguage; + private 
/**
 * Mutable data holder mirroring the JSON structure of a component creation
 * request: a list of context IRIs plus a nested RSP engine description.
 * Instances are typically created by Gson deserialization.
 */
@SuppressWarnings("unused")
public class JsonComponentEntry {

    private List<String> contextIris;
    private RspEngineEntry rspEngine;

    /**
     * Creates an entry with the given context IRIs and RSP engine description.
     */
    public JsonComponentEntry(List<String> contextIris, RspEngineEntry rspEngine) {
        this.contextIris = contextIris;
        this.rspEngine = rspEngine;
    }

    public List<String> getContextIris() {
        return contextIris;
    }

    public void setContextIris(List<String> contextIris) {
        this.contextIris = contextIris;
    }

    public RspEngineEntry getRspEngine() {
        return rspEngine;
    }

    public void setRspEngine(RspEngineEntry rspEngine) {
        this.rspEngine = rspEngine;
    }

    /**
     * Checks whether all fields required for a valid component entry are
     * present (non-null), including the nested RSP engine fields.
     *
     * @return true if no required field is null
     */
    public boolean validateIfNonNull() {
        if (contextIris == null || rspEngine == null) {
            return false;
        }
        return rspEngine.getQueryLanguage() != null && rspEngine.getUrl() != null;
    }

    /** Nested JSON structure describing the component's RSP engine. */
    static class RspEngineEntry {

        private String queryLanguage;
        private String url;

        /** Creates an engine entry with the given query language and URL. */
        public RspEngineEntry(String queryLanguage, String url) {
            this.queryLanguage = queryLanguage;
            this.url = url;
        }

        public String getQueryLanguage() {
            return queryLanguage;
        }

        public void setQueryLanguage(String queryLanguage) {
            this.queryLanguage = queryLanguage;
        }

        public String getUrl() {
            return url;
        }

        public void setUrl(String url) {
            this.url = url;
        }

    }

}
+ * + * @param json component configuration as a JSON string + * @return a parsed component entry of which the gettable fields can + * directly be used as input for the registration of components in a + * DIVIDE engine using the {@link IDivideEngine#registerComponent( + * List, RspQueryLanguage, String)} method + * @throws ComponentEntryParserException if the specified component configuration is + * not in the required JSON format + */ + public static ComponentEntry parseComponentEntry(String json) + throws ComponentEntryParserException { + // parse json + JsonComponentEntry jsonComponentEntry = + GSON.fromJson(json, JsonComponentEntry.class); + + // check if all fields are non-null + boolean valid = jsonComponentEntry.validateIfNonNull(); + if (!valid) { + throw new ComponentEntryParserException("Not all required JSON fields are present"); + } + + return parseComponentEntry(jsonComponentEntry); + } + + private static ComponentEntry parseComponentEntry(JsonComponentEntry jsonComponentEntry) + throws ComponentEntryParserException { + // parse context IRIs + List contextIris = jsonComponentEntry.getContextIris().stream() + .map(String::trim) + .collect(Collectors.toList()); + ComponentEntryParser.validateContextIris(contextIris); + + // parse RSP query language + RspQueryLanguage rspQueryLanguage = + ComponentEntryParser.parseRspEngineQueryLanguage( + jsonComponentEntry.getRspEngine().getQueryLanguage()); + + // parse RSP engine registration URL + String rspEngineUrl = jsonComponentEntry.getRspEngine().getUrl(); + ComponentEntryParser.validateRspEngineUrl(rspEngineUrl); + + // if no errors, then return new component entry + return new ComponentEntry( + contextIris, + rspQueryLanguage, + rspEngineUrl); + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/ContextEnrichmentEntry.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/ContextEnrichmentEntry.java new file mode 100644 index 
package be.ugent.idlab.divide.util.query;

import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput;

import java.util.ArrayList;
import java.util.List;

/**
 * JSON mapping class for the optional context enrichment part of a DIVIDE
 * query entry. Instances are populated by Gson, hence the unused-warning
 * suppression and the setter-based design.
 */
@SuppressWarnings("unused")
public class ContextEnrichmentEntry {

    // whether reasoning should be performed when executing the enriching queries
    private boolean doReasoning;
    // whether the enriching queries should also run over the ontology triples
    private boolean executeOnOntologyTriples;
    // SPARQL queries used for the context enrichment (never null; may be empty)
    private List<String> queries;

    public ContextEnrichmentEntry() {
        // defaults used when a field is absent from the JSON input
        this.doReasoning = true;
        this.executeOnOntologyTriples = true;
        this.queries = new ArrayList<>();
    }

    public boolean doReasoning() {
        return doReasoning;
    }

    public void setDoReasoning(boolean doReasoning) {
        this.doReasoning = doReasoning;
    }

    public boolean executeOnOntologyTriples() {
        return executeOnOntologyTriples;
    }

    public void setExecuteOnOntologyTriples(boolean executeOnOntologyTriples) {
        this.executeOnOntologyTriples = executeOnOntologyTriples;
    }

    public List<String> getQueries() {
        return queries;
    }

    public void setQueries(List<String> queries) {
        this.queries = queries;
    }

}

/**
 * JSON mapping class for a DIVIDE query entry specified directly in the
 * DIVIDE format (query pattern + sensor query rule + goal), with an optional
 * context enrichment section. Populated by Gson.
 */
@SuppressWarnings("unused")
public class DivideQueryEntryInDivideFormat {

    private String queryPattern;
    private String sensorQueryRule;
    private String goal;

    // optional; null when the JSON entry defines no context enrichment
    private ContextEnrichmentEntry contextEnrichment;

    public DivideQueryEntryInDivideFormat() {
        // empty on purpose - fields are filled in by Gson
    }

    public String getQueryPattern() {
        return queryPattern;
    }

    public void setQueryPattern(String queryPattern) {
        this.queryPattern = queryPattern;
    }

    public String getSensorQueryRule() {
        return sensorQueryRule;
    }

    public void setSensorQueryRule(String sensorQueryRule) {
        this.sensorQueryRule = sensorQueryRule;
    }

    public String getGoal() {
        return goal;
    }

    public void setGoal(String goal) {
        this.goal = goal;
    }

    public ContextEnrichmentEntry getContextEnrichment() {
        return contextEnrichment;
    }

    public void setContextEnrichment(ContextEnrichmentEntry contextEnrichmentEntry) {
        this.contextEnrichment = contextEnrichmentEntry;
    }

}

/**
 * Immutable pair of a parsed {@link DivideQueryParserInput} and the (possibly
 * null) context enrichment section of a DIVIDE query entry that was specified
 * in a query format (SPARQL or RSP-QL).
 */
public class DivideQueryEntryInQueryFormat {

    final DivideQueryParserInput divideQueryParserInput;
    final ContextEnrichmentEntry contextEnrichmentEntry;

    public DivideQueryEntryInQueryFormat(DivideQueryParserInput divideQueryParserInput,
                                         ContextEnrichmentEntry contextEnrichmentEntry) {
        this.divideQueryParserInput = divideQueryParserInput;
        this.contextEnrichmentEntry = contextEnrichmentEntry;
    }

    public DivideQueryParserInput getDivideQueryParserInput() {
        return divideQueryParserInput;
    }

    /** @return the context enrichment section, or null if none was specified */
    public ContextEnrichmentEntry getContextEnrichmentEntry() {
        return contextEnrichmentEntry;
    }

}
b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParser.java @@ -0,0 +1,72 @@ +package be.ugent.idlab.divide.util.query; + +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput; +import be.ugent.idlab.divide.core.query.parser.InputQueryLanguage; +import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.JsonSyntaxException; + +public class DivideQueryEntryParser { + + private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create(); + + public static DivideQueryEntryInQueryFormat parseSparqlEntryAsDivideQuery(String json) + throws InvalidDivideQueryParserInputException { + // parse DIVIDE query parser input + DivideQueryParserInput input; + try { + input = GSON.fromJson(json, DivideQueryParserInput.class); + input.setInputQueryLanguage(InputQueryLanguage.SPARQL); + } catch (JsonSyntaxException e) { + throw new InvalidDivideQueryParserInputException("Invalid JSON syntax", e); + } + + // parse context enrichment entry + JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject(); + ContextEnrichmentEntry contextEnrichmentEntry = jsonObject.get("contextEnrichment") != null ? 
+ GSON.fromJson(jsonObject.get("contextEnrichment").toString(), + ContextEnrichmentEntry.class) : null; + + return new DivideQueryEntryInQueryFormat(input, contextEnrichmentEntry); + } + + public static DivideQueryEntryInQueryFormat parseRspQlEntryAsDivideQuery(String json) + throws InvalidDivideQueryParserInputException { + DivideQueryParserInput input; + try { + input = GSON.fromJson(json, DivideQueryParserInput.class); + input.setInputQueryLanguage(InputQueryLanguage.RSP_QL); + } catch (JsonSyntaxException e) { + throw new InvalidDivideQueryParserInputException("Invalid JSON syntax", e); + } + + // parse context enrichment entry + JsonObject jsonObject = new JsonParser().parse(json).getAsJsonObject(); + ContextEnrichmentEntry contextEnrichmentEntry = jsonObject.get("contextEnrichment") != null ? + GSON.fromJson(jsonObject.get("contextEnrichment").toString(), + ContextEnrichmentEntry.class) : null; + + return new DivideQueryEntryInQueryFormat(input, contextEnrichmentEntry); + } + + public static DivideQueryEntryInDivideFormat parseDivideQueryEntryInDivideFormat(String json) + throws DivideQueryEntryParserException { + // check if any json is given: + // if not, no context enrichment entry is defined -> return empty entry + if (json == null || json.trim().isEmpty()) { + return new DivideQueryEntryInDivideFormat(); + } + + // parse json + try { + return GSON.fromJson(json, DivideQueryEntryInDivideFormat.class); + } catch (Exception e) { + throw new DivideQueryEntryParserException( + "DIVIDE query is not in expected JSON format", e); + } + } + +} diff --git a/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParserException.java b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParserException.java new file mode 100644 index 0000000..d77b909 --- /dev/null +++ b/src/divide-central/divide-api/src/main/java/be/ugent/idlab/divide/util/query/DivideQueryEntryParserException.java @@ 
/**
 * Checked exception signalling that a DIVIDE query entry could not be parsed
 * (e.g. because it is not in the expected JSON format).
 */
@SuppressWarnings("unused")
public class DivideQueryEntryParserException extends Exception {

    /** Creates an exception with the given description only. */
    public DivideQueryEntryParserException(String description) {
        super(description);
    }

    /** Creates an exception with a description and an underlying cause. */
    public DivideQueryEntryParserException(String description, Exception base) {
        super(description, base);
    }

    /** Creates an exception that only wraps an underlying cause. */
    public DivideQueryEntryParserException(Exception base) {
        super(base);
    }

}
package be.ugent.idlab.divide.core.component;

import be.ugent.idlab.divide.core.context.IContextEnricher;
import be.ugent.idlab.divide.core.query.IDivideQuery;
import be.ugent.idlab.divide.rsp.IRspEngineHandler;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Default {@link IComponent} implementation: an ID, the handler for the RSP
 * engine running on the component, the component's context IRIs, and a map of
 * context enrichers per DIVIDE query (keyed on the query name).
 */
class Component implements IComponent {

    private final String id;
    private final IRspEngineHandler rspEngineHandler;
    private final List<String> contextIris;
    // context enrichers per registered DIVIDE query, keyed on the query name
    private final Map<String, IContextEnricher> contextEnricherMap;

    Component(String id,
              IRspEngineHandler rspEngineHandler,
              List<String> contextIris) {
        this.id = id;
        this.rspEngineHandler = rspEngineHandler;
        // defensive copy so later changes to the caller's list do not leak in
        this.contextIris = new ArrayList<>(contextIris);
        this.contextEnricherMap = new HashMap<>();
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public IRspEngineHandler getRspEngineHandler() {
        return rspEngineHandler;
    }

    @Override
    public List<String> getContextIris() {
        return contextIris;
    }

    // enricher map accessors are synchronized since queries can be (un)registered
    // concurrently with lookups

    @Override
    public synchronized void registerContextEnricher(IDivideQuery divideQuery,
                                                     IContextEnricher contextEnricher) {
        contextEnricherMap.put(divideQuery.getName(), contextEnricher);
    }

    @Override
    public synchronized void unregisterContextEnricher(IDivideQuery divideQuery) {
        contextEnricherMap.remove(divideQuery.getName());
    }

    @Override
    public synchronized IContextEnricher getContextEnricher(IDivideQuery divideQuery) {
        return contextEnricherMap.get(divideQuery.getName());
    }

}
package be.ugent.idlab.divide.core.component;

import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
import be.ugent.idlab.divide.rsp.IRspEngineHandler;
import be.ugent.idlab.divide.rsp.RspEngineHandlerFactory;
import be.ugent.idlab.divide.rsp.RspQueryLanguage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.List;

@SuppressWarnings({"UnusedReturnValue", "unused"})
public class ComponentFactory {

    private static final Logger LOGGER =
            LoggerFactory.getLogger(ComponentFactory.class.getName());

    /**
     * Creates an {@link IComponent} instance with the given inputs.
     *
     * @param contextIris IRIs of the ABoxes in a knowledge base that represent the relevant
     *                    context associated to the new {@link IComponent}
     * @param rspQueryLanguage RSP query language used by the RSP engine running on
     *                         the created component
     * @param rspEngineUrl URL which should be used for communication with the RSP engine
     *                     running on the created component, and which will also be mapped
     *                     to a unique ID for the created component
     * @return the new {@link IComponent}
     * @throws DivideInvalidInputException if the RSP engine URL is no valid URL
     */
    public static IComponent createInstance(List<String> contextIris,
                                            RspQueryLanguage rspQueryLanguage,
                                            String rspEngineUrl)
            throws DivideInvalidInputException {
        // create a handler for the RSP engine running on the new component
        // (this includes a validation of the URL to communicate with the engine later on)
        IRspEngineHandler rspEngine = RspEngineHandlerFactory.createInstance(
                rspQueryLanguage, rspEngineUrl);

        // update RSP engine URL to validated & preprocessed URL
        rspEngineUrl = rspEngine.getRspEngine().getBaseUrl();

        // create a unique ID which is a modified version of the RSP engine
        // URL that is file system friendly (i.e., that can be
        // used in file names and directory names)
        String id;
        try {
            URL url = new URL(rspEngineUrl);
            // NOTE(review): a missing port falls back to 80, assuming http - confirm
            // this is intended for https URLs as well
            id = String.format("%s-%d-%s",
                    url.getHost(),
                    url.getPort() != -1 ? url.getPort() : 80,
                    URLEncoder.encode(url.getPath(), StandardCharsets.UTF_8.toString())
                            .replaceAll("%", ""));
        } catch (MalformedURLException | UnsupportedEncodingException e) {
            // should never occur since the URL has been validated when creating the
            // IRspEngineHandler above
            LOGGER.error("The validated RSP engine URL '{}' could unexpectedly not be " +
                    "parsed to construct a component ID", rspEngineUrl, e);
            throw new DivideInvalidInputException("An unknown input validation error has occurred", e);
        }

        return new Component(id, rspEngine, contextIris);
    }

}
package be.ugent.idlab.divide.core.component;

import be.ugent.idlab.divide.core.context.IContextEnricher;
import be.ugent.idlab.divide.core.query.IDivideQuery;
import be.ugent.idlab.divide.rsp.IRspEngineHandler;

import java.util.List;

/**
 * Representation of a DIVIDE component.
 * It has an ID, a list of context IRIs, and an {@link IRspEngineHandler}.
 * The list of context IRIs contains all ABox IRIs in the knowledge base that
 * represent the relevant context of this component, i.e., when updates to the ABox
 * associated to any of its context IRIs occurs, the DIVIDE query derivation of the
 * associated {@link IRspEngineHandler} should be triggered.
 */
public interface IComponent {

    /**
     * Retrieves the ID of this {@link IComponent}.
     * This ID is unique, and is therefore based on the registration URL
     * of the RSP engine running on this component.
     *
     * @return the ID of this {@link IComponent}
     */
    String getId();

    /**
     * Retrieves the different context IRIs of this {@link IComponent}. This is a list
     * of all ABox IRIs in the knowledge base that represent the relevant context of
     * this component, i.e., when updates to the ABox associated to any of these context
     * IRIs occurs, the DIVIDE query derivation of the associated {@link IRspEngineHandler}
     * should be triggered.
     *
     * @return the different context IRIs of this {@link IComponent}
     */
    List<String> getContextIris();

    /**
     * Retrieves the {@link IRspEngineHandler} of this component that manages the
     * RSP engine running on this component. In concrete, it handles the queries
     * registered to this engine, to ensure that the relevant queries are being
     * executed by this RSP engine at all times.
     *
     * @return the {@link IRspEngineHandler} of this component that manages the
     *         RSP engine running on this component
     */
    IRspEngineHandler getRspEngineHandler();

    /**
     * Registers the context enricher to be used for the given DIVIDE query
     * on this component, replacing any previously registered one for that query.
     */
    void registerContextEnricher(IDivideQuery divideQuery, IContextEnricher contextEnricher);

    /** Removes the context enricher registered for the given DIVIDE query, if any. */
    void unregisterContextEnricher(IDivideQuery divideQuery);

    /**
     * Retrieves the context enricher registered for the given DIVIDE query,
     * or null if none is registered.
     */
    IContextEnricher getContextEnricher(IDivideQuery divideQuery);

}
package be.ugent.idlab.divide.core.context;

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;

import java.util.UUID;

/**
 * A context snapshot: an RDF model of context triples with a unique ID.
 * A context can be enriched exactly once via {@link #enrichContext(Model)}.
 */
public class Context {

    private final String id;
    private Model context;

    // guards against enriching the same context twice
    private boolean enriched;

    public Context(Model context) {
        this(UUID.randomUUID().toString(), context);
    }

    Context(String id, Model context) {
        this.id = id;
        this.context = context;
        this.enriched = false;
    }

    public String getId() {
        return id;
    }

    public Model getContext() {
        return context;
    }

    /**
     * Replaces this context's model with the given enriched model.
     *
     * @throws RuntimeException if this context has already been enriched
     */
    public void enrichContext(Model context) {
        if (enriched) {
            throw new RuntimeException(String.format(
                    "Context with ID '%s' has already been enriched", id));
        }
        this.context = context;
        this.enriched = true;
    }

    /** @return the number of triples in this context's model */
    public long size() {
        return context.size();
    }

    /**
     * Returns a copy of this context backed by a new model containing the same
     * statements. The copy receives a fresh random ID and a cleared 'enriched'
     * flag - presumably intentional so each consumer can enrich its own copy
     * (TODO confirm).
     */
    public Context copy() {
        Model duplicate = ModelFactory.createDefaultModel();
        duplicate.add(context.listStatements());
        return new Context(duplicate);
    }

}
package be.ugent.idlab.divide.core.context;

import be.ugent.idlab.divide.core.engine.DivideOntology;
import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaUtilities;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.rdf.model.InfModel;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.reasoner.rulesys.GenericRuleReasoner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

/**
 * {@link IContextEnricher} that enriches a {@link Context} by executing an
 * ordered list of CONSTRUCT queries on that context, optionally extended with
 * the registered ontology triples and/or rule-based reasoning results,
 * depending on the configured {@link ContextEnricherMode}.
 */
public class ContextEnricher implements IContextEnricher {

    private static final Logger LOGGER = LoggerFactory.getLogger(ContextEnricher.class.getName());

    // queries executed (in order) during each context enrichment
    private final List<ContextEnrichingQuery> queries;
    private final ContextEnricherMode mode;
    private final String componentId;

    // ontology registered via registerOntology (null until first registration)
    private DivideOntology registeredOntology;
    // model the enriching queries are executed against; depending on the mode
    // this is an empty model, the plain ontology triples, or an inference model
    private Model baseModel;

    public ContextEnricher(List<ContextEnrichingQuery> queries,
                           ContextEnricherMode mode,
                           String componentId) {
        this.queries = queries;
        this.mode = mode;
        this.componentId = componentId;

        this.registeredOntology = null;
        this.baseModel = ModelFactory.createDefaultModel();
    }

    @Override
    public synchronized void registerOntology(DivideOntology ontology) {
        LOGGER.info("Registering ontology with ID {} to context enricher of component {} with mode {}",
                ontology.getId(), componentId, mode);

        // check if currently registered ontology exists and has the same ID
        // as the new ontology -> if yes, then no action should be taken anymore
        if (this.registeredOntology != null &&
                this.registeredOntology.getId().equals(ontology.getId())) {
            return;
        }

        // update saved ontology to the new ontology
        this.registeredOntology = ontology;

        if (this.queries.isEmpty()) {
            // if no queries are registered for context enrichment, then there is
            // no need to do the ontology registration process
            return;
        }

        if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING ||
                this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING) {
            // when only executing the queries on the context, nothing should be
            // done with the triples of the registered ontology
            // -> only if reasoning is still done, the ontology rules need to be
            //    loaded into a prepared inference model (on an empty base)
            if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING) {
                this.baseModel = createPreparedInfModel(
                        ontology, ModelFactory.createDefaultModel());
            }
            return;
        }

        // IF THIS PART IS REACHED: queries definitely need to be executed
        // on the ontology triples as well
        // -> add ontology triples to a new Jena model
        Model model = ModelFactory.createDefaultModel();
        model.add(ontology.getModel());

        if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING) {
            // set base model for context enrichment to clean model
            // with only the ontology triples
            this.baseModel = model;

        } else if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING) {
            // convert OWL ontology to a set of Jena rules
            // (i.e., extract all OWL 2 RL axioms from ontology and convert into rules that
            // Jena understands)
            LOGGER.info("Create Jena rule reasoner with rules extracted from ontology " +
                    "in context enricher of component {}", componentId);
            this.baseModel = createPreparedInfModel(ontology, model);
        }
    }

    /**
     * Creates a forward RETE rule reasoner from the ontology rules, wraps the
     * given base model in an inference model, and prepares it (i.e., performs
     * the reasoning over the base triples up front).
     */
    private InfModel createPreparedInfModel(DivideOntology ontology, Model base) {
        GenericRuleReasoner reasoner = new GenericRuleReasoner(ontology.getRules());
        // RETE algorithm is required to ensure fast incremental reasoning
        // -> downside: for some reason, the output of reasoning with this model leads to
        //    duplicate triples in the inferred model (not always the same number);
        //    these duplicates are filtered out again in buildQueryModel
        reasoner.setMode(GenericRuleReasoner.FORWARD_RETE);

        LOGGER.info("Start preparing reasoning during context enrichment with rule reasoner " +
                "in context enricher of component {}", componentId);
        long start = System.currentTimeMillis();
        InfModel infModel = ModelFactory.createInfModel(reasoner, base);
        infModel.prepare();
        LOGGER.debug("Finished preparing reasoning during context enrichment with rule reasoner " +
                        "in context enricher of component {} in {} ms",
                componentId, System.currentTimeMillis() - start);
        return infModel;
    }

    @Override
    public synchronized void enrichContext(Context context) {
        long start, end;

        if (queries.isEmpty()) {
            // if no queries are registered, then no context enrichment
            // needs to take place obviously
            LOGGER.info("No queries to enrich context {} for component {}",
                    context.getId(), componentId);
            return;
        }

        LOGGER.info("Enriching context {} for component {}: starting with context of {} triples",
                context.getId(), componentId, context.size());

        // create model for resulting context and add base context
        Model result = ModelFactory.createDefaultModel();
        result.add(context.getContext());

        // add context data to model to execute queries
        start = System.currentTimeMillis();
        this.baseModel.add(context.getContext());
        end = System.currentTimeMillis();
        LOGGER.info("Enriching context {} for component {}: added {} context triples " +
                        "to base model (now containing {} triples) in {} ms",
                context.getId(), componentId, context.size(), baseModel.size(), end - start);

        // model collecting everything that must be removed from the base model
        // again at the end (context triples + intermediate query results)
        Model toBeRemoved = ModelFactory.createDefaultModel();
        toBeRemoved.add(context.getContext());

        // loop over all queries in order
        for (int i = 0; i < queries.size(); i++) {
            ContextEnrichingQuery query = queries.get(i);

            // construct the model to execute this query on (this strips reasoning
            // duplicates in the reasoning modes - see buildQueryModel)
            Model queryModel = buildQueryModel(context);

            start = System.currentTimeMillis();
            try (QueryExecution queryExecution =
                         QueryExecutionFactory.create(query.getQuery(), queryModel)) {
                // execute query on query model
                Model queryResult = queryExecution.execConstruct();
                end = System.currentTimeMillis();

                LOGGER.info("Enriching context {} for component {}: executed query {} in " +
                                "{} ms to yield {} additional context triples",
                        context.getId(), componentId, query.getName(),
                        end - start, queryResult.size());
                if (!queryResult.isEmpty()) {
                    JenaUtilities.printModel(queryResult);
                }

                // add resulting triples to context
                result.add(queryResult);

                // add resulting triples to base model to ensure dependent queries work
                // (only if another query follows of course)
                if (i != queries.size() - 1) {
                    LOGGER.info("Temporarily add {} additional context triples resulting from " +
                                    "query {} to base model for execution of following query",
                            queryResult.size(), query.getName());
                    this.baseModel.add(queryResult);
                    toBeRemoved.add(queryResult);
                }

            } catch (Exception e) {
                LOGGER.error("Error during the execution of query {} in context " +
                                "enricher of context {} for component {}",
                        query.getName(), context.getId(), componentId, e);

                // if anything goes wrong during the context enrichment, the original
                // context is returned instead of a partially enriched version
                // -> FIX: also clean the context triples (and intermediate results)
                //    out of the base model again; previously they were left behind
                //    on this error path, polluting all later enrichments
                this.baseModel.remove(toBeRemoved);
                return;
            }
        }

        // again remove all context data from the model
        start = System.currentTimeMillis();
        this.baseModel.remove(toBeRemoved);
        end = System.currentTimeMillis();
        LOGGER.info("Enriching context {} for component {}: removed context triples " +
                "from base model in {} ms", context.getId(), componentId, end - start);

        // update enriched context
        context.enrichContext(result);
    }

    /**
     * Builds the model a single enriching query is executed on.
     * In the reasoning modes, a new query model is constructed to remove the
     * duplicate triples created by the FORWARD_RETE reasoning (the number of
     * duplicates is non-deterministic, but the set of unique triples is);
     * in the non-reasoning modes, this is simply a copy of the base model.
     */
    private Model buildQueryModel(Context context) {
        if (this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING ||
                this.mode == ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING) {
            long start = System.currentTimeMillis();

            // start with empty query model
            Model queryModel = ModelFactory.createDefaultModel();

            // retrieve the two parts of the inferred model:
            // - the raw model (triples after adding context but before doing incremental
            //   reasoning on new model with added context triples)
            // - the deductions model (triples inferred from doing the incremental reasoning
            //   on new model with added context triples)
            InfModel inferredBaseModel = (InfModel) this.baseModel;
            Model rawModel = inferredBaseModel.getRawModel();
            Model deductionsModel = inferredBaseModel.getDeductionsModel();

            // find duplicates, i.e., triples in deductions model that were already present
            // in raw model
            Model duplicates = deductionsModel.intersection(rawModel);

            // create new version of deductions model without the duplicate triples
            Model nonDuplicateDeductionsModel = ModelFactory.createDefaultModel();
            nonDuplicateDeductionsModel.add(deductionsModel);
            nonDuplicateDeductionsModel.remove(duplicates);

            // create query model from original raw model, and deductions model
            // without the duplicate triples
            queryModel.add(rawModel);
            queryModel.add(nonDuplicateDeductionsModel);

            long end = System.currentTimeMillis();
            LOGGER.info("Enriching context {} for component {}: removing {} duplicates in " +
                            "{} ms to construct query model with {} triples",
                    context.getId(), componentId,
                    duplicates.size(), end - start, queryModel.size());
            return queryModel;

        } else {
            // in non-reasoning cases, this is simply a copy of the base model
            Model queryModel = ModelFactory.createDefaultModel();
            queryModel.add(this.baseModel);
            return queryModel;
        }
    }

}
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherFactory.java @@ -0,0 +1,26 @@ +package be.ugent.idlab.divide.core.context; + +public class ContextEnricherFactory { + + /** + * Create and return a new DIVIDE context enricher. + * + * @return newly created DIVIDE context enricher + */ + public static synchronized IContextEnricher createInstance(ContextEnrichment contextEnrichment, + String componentId) { + // only create a context enricher with actual logic, if context enriching queries + // are defined in the given context enrichment + if (contextEnrichment == null || + contextEnrichment.getQueries() == null || + contextEnrichment.getQueries().isEmpty()) { + return new DummyContextEnricher(); + } else { + return new ContextEnricher( + contextEnrichment.getQueries(), + contextEnrichment.getMode(), + componentId); + } + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherMode.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherMode.java new file mode 100644 index 0000000..2b836ce --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/context/ContextEnricherMode.java @@ -0,0 +1,26 @@ +package be.ugent.idlab.divide.core.context; + +public enum ContextEnricherMode { + + EXECUTE_ON_CONTEXT_WITHOUT_REASONING(false, false), + EXECUTE_ON_CONTEXT_WITH_REASONING(false, true), + EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING(true, false), + EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING(true, true); + + private final boolean loadOntology; + private final boolean performReasoning; + + ContextEnricherMode(boolean loadOntology, boolean performReasoning) { + this.loadOntology = loadOntology; + this.performReasoning = performReasoning; + } + + public boolean loadOntology() { + return loadOntology; + } + + public boolean performReasoning() { + return performReasoning; + } + +} 
package be.ugent.idlab.divide.core.context;

import be.ugent.idlab.divide.core.engine.DivideOntology;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * A single named SPARQL query used during context enrichment.
 */
public class ContextEnrichingQuery {

    private final String name;
    private final String query;

    ContextEnrichingQuery(String name, String query) {
        this.name = name;
        this.query = query;
    }

    /** Names the query after its (0-based) position in the enrichment query list. */
    ContextEnrichingQuery(int order, String query) {
        this(String.format("query-%d", order), query);
    }

    public String getName() {
        return name;
    }

    public String getQuery() {
        return query;
    }

    @Override
    public String toString() {
        // included so that ContextEnrichment#toString logs readable query names
        // instead of default object hashes
        return name;
    }

}

/**
 * The context enrichment configuration of a DIVIDE query: the mode to run the
 * enrichment in and the ordered list of enriching queries.
 */
public class ContextEnrichment {

    private final ContextEnricherMode mode;
    private final List<ContextEnrichingQuery> queries;

    public ContextEnrichment() {
        // default constructor when no context enrichment is available
        this.mode = ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING;
        this.queries = new ArrayList<>();
    }

    public ContextEnrichment(boolean doReasoning,
                             boolean executeWithOntologyTriples,
                             List<String> queries) {
        // set list of queries; each query is named after its ORIGINAL index,
        // and blank entries are dropped only after naming - so query names may
        // have gaps but stay stable w.r.t. the caller's input list
        this.queries = IntStream.range(0, queries.size())
                .mapToObj(i -> new ContextEnrichingQuery(i, queries.get(i)))
                .filter(query -> query.getQuery() != null && !query.getQuery().trim().isEmpty())
                .collect(Collectors.toList());

        // set correct mode (no queries -> reasoning/ontology flags are irrelevant)
        if (this.queries.isEmpty()) {
            this.mode = ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING;
        } else if (executeWithOntologyTriples) {
            this.mode = doReasoning
                    ? ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITH_REASONING
                    : ContextEnricherMode.EXECUTE_ON_CONTEXT_AND_ONTOLOGY_WITHOUT_REASONING;
        } else {
            this.mode = doReasoning
                    ? ContextEnricherMode.EXECUTE_ON_CONTEXT_WITH_REASONING
                    : ContextEnricherMode.EXECUTE_ON_CONTEXT_WITHOUT_REASONING;
        }
    }

    public ContextEnricherMode getMode() {
        return mode;
    }

    public List<ContextEnrichingQuery> getQueries() {
        return queries;
    }

    @Override
    public String toString() {
        return "ContextEnrichment{" +
                "mode=" + mode +
                ", queries=" + queries +
                '}';
    }

}

/**
 * No-op {@link IContextEnricher} used when no enriching queries are configured.
 */
public class DummyContextEnricher implements IContextEnricher {

    @Override
    public void registerOntology(DivideOntology ontology) {
        // do nothing - empty on purpose
    }

    @Override
    public void enrichContext(Context context) {
        // do nothing - empty on purpose
    }

}
// NOTE(review): this diff hunk carries two separate source files
// (IContextEnricher.java and DivideComponentManager.java). The generic type
// parameters stripped in the diff transport (`Map`, `Map>`, bare `List`,
// `Collection`) are restored below from how the collections are used.

package be.ugent.idlab.divide.core.context;

import be.ugent.idlab.divide.core.engine.DivideOntology;

/**
 * Enricher of the context of a DIVIDE component: executes the
 * context-enriching queries of a DIVIDE query on the component's context.
 */
public interface IContextEnricher {

    /**
     * Registers the given DIVIDE ontology with this enricher, so it can be
     * used (depending on the enricher mode) during context enrichment.
     */
    void registerOntology(DivideOntology ontology);

    /**
     * Enriches the given context in place by executing the configured
     * context-enriching queries on it.
     */
    void enrichContext(Context context);

}

// --- file: DivideComponentManager.java ----------------------------------

package be.ugent.idlab.divide.core.engine;

import be.ugent.idlab.divide.core.component.ComponentFactory;
import be.ugent.idlab.divide.core.component.IComponent;
import be.ugent.idlab.divide.core.context.Context;
import be.ugent.idlab.divide.core.exception.DivideInitializationException;
import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
import be.ugent.idlab.divide.rsp.RspQueryLanguage;
import be.ugent.idlab.divide.util.LogConstants;
import be.ugent.idlab.kb.IIriResolver;
import be.ugent.idlab.kb.IKnowledgeBase;
import be.ugent.idlab.kb.IKnowledgeBaseObserver;
import be.ugent.idlab.kb.exception.InvalidIriException;
import be.ugent.idlab.kb.exception.KnowledgeBaseOperationException;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Manager of the DIVIDE components associated to a {@link DivideEngine}.
 * Observes the knowledge base and triggers the engine's query derivation
 * when the context of a registered component changes.
 */
class DivideComponentManager implements IKnowledgeBaseObserver {

    private static final Logger LOGGER =
            LoggerFactory.getLogger(DivideComponentManager.class.getName());

    private final DivideEngine divideEngine;

    private final IKnowledgeBase knowledgeBase;

    // registered components, keyed by component ID
    private final Map<String, IComponent> registeredComponents;

    /**
     * Map which keeps track of how context IRIs are mapped to components observing
     * this context IRI, i.e., for each context IRI, it keeps track of for which
     * components the ABox with that IRI in the knowledge base partly defines its
     * context. If a change to a specific context IRI is observed, this means that
     * the query derivation should be triggered for each component in the
     * associated list.
     */
    private final Map<String, List<IComponent>> contextIriObservers;

    /**
     * Boolean representing whether RSP engine streams on a component should be paused
     * when context changes are detected that trigger the DIVIDE query derivation for
     * that component.
     */
    private final boolean pauseRspEngineStreamsOnContextChanges;

    /**
     * Creates a new instance of a {@link DivideComponentManager} associated
     * to the given {@link DivideEngine} and {@link IKnowledgeBase}.
     *
     * @param divideEngine DIVIDE engine for which the new instance should manage components,
     *                     and which will perform the query derivation if the new component
     *                     manager observes changes in the context associated to a component
     * @param knowledgeBase knowledge base that should be used to observe any changes
     *                      to the context of the managed components
     * @param pauseRspEngineStreamsOnContextChanges boolean representing whether RSP engine
     *                                              streams on a component should be paused
     *                                              when context changes are detected that
     *                                              trigger the DIVIDE query derivation for
     *                                              that component
     */
    DivideComponentManager(DivideEngine divideEngine,
                           IKnowledgeBase knowledgeBase,
                           boolean pauseRspEngineStreamsOnContextChanges) {
        this.divideEngine = divideEngine;
        this.knowledgeBase = knowledgeBase;

        this.registeredComponents = new HashMap<>();
        this.contextIriObservers = new HashMap<>();

        // register this manager as observer of knowledge base changes
        this.knowledgeBase.registerObserver(this);

        this.pauseRspEngineStreamsOnContextChanges = pauseRspEngineStreamsOnContextChanges;
    }

    /**
     * Registers a new component for the given context IRIs.
     *
     * @return the newly created component, or null if a component with the
     *         same ID (derived from the inputs) is already registered
     * @throws DivideInvalidInputException if a context IRI cannot be resolved
     *                                     by the knowledge base's IRI resolver
     */
    synchronized IComponent registerComponent(List<String> contextIris,
                                              RspQueryLanguage rspQueryLanguage,
                                              String rspEngineUrl)
            throws DivideInvalidInputException {
        // resolve all context IRIs
        List<String> resolvedContextIris = new ArrayList<>();
        try {
            IIriResolver iriResolver = knowledgeBase.getIriResolver();
            for (String contextIri : contextIris) {
                resolvedContextIris.add(iriResolver.resolveIri(contextIri));
            }
        } catch (InvalidIriException e) {
            throw new DivideInvalidInputException("Invalid context IRI(s) which cannot be " +
                    "resolved by the DIVIDE knowledge base", e);
        }

        // create component
        IComponent component = ComponentFactory.createInstance(
                resolvedContextIris, rspQueryLanguage, rspEngineUrl);

        // ensure component with that ID does not yet exist
        if (registeredComponents.containsKey(component.getId())) {
            LOGGER.warn("Trying to register component with already existing ID");
            return null;
        }

        LOGGER.info("Registering component with ID '{}'", component.getId());

        // keep track of component by ID
        registeredComponents.put(component.getId(), component);

        return component;
    }

    /**
     * Adds the given component as observer of the given context IRI, so that
     * ABox updates to this IRI trigger its query derivation.
     */
    void addContextIriObserver(String contextIri, IComponent component) {
        // create the observer list lazily on first registration for this IRI
        contextIriObservers
                .computeIfAbsent(contextIri, iri -> new ArrayList<>())
                .add(component);
    }

    /**
     * @return removed component if component with given ID exists and is removed
     *         from the list of registered components, null if no component with
     *         given ID exists
     */
    synchronized IComponent unregisterComponent(String componentId) {
        IComponent component = registeredComponents.remove(componentId);
        if (component != null) {
            LOGGER.info("Unregistering component with ID '{}'", componentId);

            // remove component as observer for its context IRIs
            // (null-safe: an IRI may never have been registered as observer key)
            component.getContextIris().forEach(s -> {
                List<IComponent> observers = contextIriObservers.get(s);
                if (observers != null) {
                    observers.remove(component);
                }
            });
        }

        return component;
    }

    synchronized Collection<IComponent> getRegisteredComponents() {
        return registeredComponents.values();
    }

    synchronized IComponent getRegisteredComponentById(String id) {
        return registeredComponents.get(id);
    }

    /**
     * Builds the full context of the component with the given ID by merging the
     * ABoxes of all its context IRIs.
     *
     * @return merged context model, or null if no component with the given ID
     *         exists or any ABox retrieval fails (incomplete context is treated
     *         as non-existing)
     */
    Model getContextAssociatedToComponent(String id) {
        IComponent component = registeredComponents.get(id);
        if (component != null) {
            try {
                Model componentContext = ModelFactory.createDefaultModel();
                for (String contextIri : component.getContextIris()) {
                    // get ABox associated to each context IRI
                    Model context = knowledgeBase.getABox(contextIri);

                    // add retrieved context to full context of this component
                    componentContext.add(context.listStatements());
                }
                return componentContext;

            } catch (KnowledgeBaseOperationException e) {
                // if an error occurs when retrieving the knowledge base context
                // for a given component, the context is incomplete and therefore
                // considered non-existing
                LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
                        "Error occurred when retrieving current context of" +
                                " component with ID {}",
                        component.getId(), e);

                return null;
            }

        } else {
            return null;
        }
    }

    @Override
    public synchronized void notifyABoxUpdated(String iri, Model model) {
        // check if queries need to be updated for components
        // (is the case for components observing this iri)
        boolean updateQueries = contextIriObservers.containsKey(iri) &&
                !contextIriObservers.get(iri).isEmpty();

        if (updateQueries) {
            LOGGER.info("Receiving knowledge base update for ABox with IRI '{}'", iri);

            // as soon as a context change is detected, the RSP engine should be paused
            // until further notice (i.e., until the query registration finished at some
            // point and restarts it again)
            if (pauseRspEngineStreamsOnContextChanges) {
                for (IComponent component : contextIriObservers.get(iri)) {
                    component.getRspEngineHandler().pauseRspEngineStreams();
                }
            }

            // keep track of map with fetched contexts, so each ABox is
            // retrieved at most once for this update
            Map<String, Model> contextSnapshots = new HashMap<>();

            // handle every observing component
            for (IComponent component : contextIriObservers.get(iri)) {
                try {
                    Model componentContext = ModelFactory.createDefaultModel();

                    // retrieve context for every IRI that is part of this component's context
                    for (String contextIri : component.getContextIris()) {
                        Model context;
                        if (iri.equals(contextIri)) {
                            // use the freshly pushed model for the updated IRI
                            context = model;
                        } else if (contextSnapshots.containsKey(contextIri)) {
                            context = contextSnapshots.get(contextIri);
                        } else {
                            context = knowledgeBase.getABox(contextIri);
                            contextSnapshots.put(contextIri, context);
                        }

                        // add retrieved context to full context of this component
                        componentContext.add(context.listStatements());
                    }

                    // update queries for component using its full context
                    divideEngine.enqueueGeneralDivideQueryDerivationTask(
                            component, new Context(componentContext));

                } catch (KnowledgeBaseOperationException e) {
                    // if an error occurs when retrieving the knowledge base context
                    // for a given component, no RSP query update is enqueued for this
                    // component (because the context is incomplete)
                    LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER,
                            "Error occurred when retrieving current context of" +
                                    " component with ID {} -> queries are NOT updated",
                            component.getId(), e);

                    // TODO MONITOR: 01/02/2021 do something with this?

                }
            }
        }
    }

    @Override
    public synchronized void notifyTBoxUpdated(Model model) {
        // reload the ontology asynchronously so the knowledge base observer
        // callback is not blocked by the (potentially long) ontology loading
        Thread tBoxUpdateThread = new Thread(() -> {
            try {
                LOGGER.info("TBox of DIVIDE knowledge base updated -> reloaded as DIVIDE ontology");

                // load new ontology to the DIVIDE engine
                divideEngine.loadOntology(model);

            } catch (DivideInvalidInputException | DivideInitializationException e) {
                // if something goes wrong, it should be logged,
                // BUT the engine is guaranteed to continue working with the
                // latest successfully loaded ontology, so no further action
                // is required
                // (pass the exception so the failure cause is not lost)
                LOGGER.error("Reloading new TBox as DIVIDE ontology FAILED - DIVIDE engine will" +
                        " continue working with the latest successfully loaded ontology", e);
            }
        });
        tBoxUpdateThread.start();
    }

}
be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; +import be.ugent.idlab.divide.core.query.DivideQueryFactory; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserFactory; +import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser; +import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException; +import be.ugent.idlab.divide.rsp.RspQueryLanguage; +import be.ugent.idlab.divide.util.LogConstants; +import be.ugent.idlab.kb.IKnowledgeBase; +import be.ugent.idlab.kb.exception.KnowledgeBaseOperationException; +import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaOwlApiUtilities; +import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaRuleUtilities; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.reasoner.rulesys.Rule; +import org.semanticweb.owlapi.model.OWLOntology; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +class DivideEngine implements IDivideEngine { + + private static final Logger LOGGER = LoggerFactory.getLogger(DivideEngine.class.getName()); + + /** + * Worker thread pool of maximum 100 threads to parallelize + * the query derivation process + */ + private final ThreadPoolExecutor workerThreadPool; + + /** + * Instance responsible for deriving the instantiated RSP-QL queries from + * the generic 
DIVIDE queries + */ + private IDivideQueryDeriver divideQueryDeriver; + + /** + * Manager of the DIVIDE components associated to this DIVIDE engine + */ + private DivideComponentManager divideComponentManager; + + /** + * Map linking a query name to the DIVIDE query instance + */ + private final Map divideQueryMap; + + /** + * Map linking a component ID to its queue where query update + * requests can be put + */ + private final Map> componentQueryUpdateQueueMap; + + /** + * Map linking a component ID to the thread that is processing + * its query update queue + */ + private final Map componentQueryUpdateThreadMap; + + private DivideOntology divideOntology; + + /** + * Boolean representing whether the engine has been successfully initialized + */ + private boolean initialized; + + /** + * Boolean representing whether RSP engine streams on a component should be + * paused when context changes are detected that trigger the DIVIDE query + * derivation for that component + */ + private boolean pauseRspEngineStreamsOnContextChanges; + + /** + * Boolean representing whether variable matches in the input for the DIVIDE + * query parser that are not defined as mappings, should be considered as + * mappings by default + */ + private boolean processUnmappedVariableMatchesInParser; + + /** + * Boolean representing whether variables in the RSP-QL query body generated by + * the DIVIDE query parser, should be validated (= checked for occurrence in the + * WHERE clause of the query or in the set of input variables that will be + * substituted during the DIVIDE query derivation) during parsing + */ + private boolean validateUnboundVariablesInRspQlQueryBodyInParser; + + /** + * Patterns used for preprocessing a DIVIDE query's sensor query rule + */ + private static final Pattern INPUT_VARIABLE_NAME_PATTERN = + Pattern.compile("\"\\?([^\"]+)\""); + private static final Pattern INPUT_VARIABLE_DEFINITION_PATTERN = + Pattern.compile("\\(" + INPUT_VARIABLE_NAME_PATTERN + 
"\\s+\\?[^()]+\\s*\\)"); + private static final Pattern INPUT_VARIABLE_LIST_PATTERN = + Pattern.compile("(inputVariables>?)\\s+\\(\\s*(\\s*" + + INPUT_VARIABLE_DEFINITION_PATTERN + ")*\\s*\\)"); + + DivideEngine() { + // set initialized flag to false until initialize method is called + // and successfully ended + this.initialized = false; + + // create engine objects + this.divideQueryDeriver = null; + this.divideComponentManager = null; + this.divideQueryMap = new HashMap<>(); + this.componentQueryUpdateQueueMap = new HashMap<>(); + this.componentQueryUpdateThreadMap = new HashMap<>(); + this.divideOntology = null; + + // create worker thread pool + this.workerThreadPool = (ThreadPoolExecutor) Executors.newFixedThreadPool(100); + this.workerThreadPool.setCorePoolSize(50); + this.workerThreadPool.setMaximumPoolSize(100); + // alive time set below has no influence on whether and when the query update + // threads for the different components are stopped, since they are running + // outside this worker thread pool (it is only used for the query derivation for + // a single DIVIDE query, submitted by the query update thread!) 
+ this.workerThreadPool.setKeepAliveTime(1, TimeUnit.HOURS); + } + + @Override + public void initialize(IDivideQueryDeriver divideQueryDeriver, + IKnowledgeBase knowledgeBase, + Model divideOntologyModel, + boolean pauseRspEngineStreamsOnContextChanges, + boolean processUnmappedVariableMatchesInParser, + boolean validateUnboundVariablesInRspQlQueryBodyInParser) + throws DivideInvalidInputException, DivideInitializationException { + LOGGER.info("Initializing DIVIDE engine..."); + + // save engine that handles the actual query derivation + this.divideQueryDeriver = divideQueryDeriver; + + // load ontology in DIVIDE query deriver + loadOntology(divideOntologyModel); + + // register ontology as TBox of the knowledge base + // (important to do this before creating the DIVIDE component manager, + // since this manager will register itself as an observer of the + // ontology, and should not receive a TBox update yet + try { + LOGGER.info("Registering DIVIDE ontology as TBox of knowledge base"); + knowledgeBase.setTBox(divideOntologyModel); + } catch (KnowledgeBaseOperationException e) { + // should normally not occur + throw new DivideInitializationException( + "Error when registering DIVIDE ontology as TBox in knowledge base", e); + } + + // save setting on pausing RSP engine streams + this.pauseRspEngineStreamsOnContextChanges = pauseRspEngineStreamsOnContextChanges; + + // save parser settings + this.processUnmappedVariableMatchesInParser = processUnmappedVariableMatchesInParser; + this.validateUnboundVariablesInRspQlQueryBodyInParser = + validateUnboundVariablesInRspQlQueryBodyInParser; + + // create DIVIDE component manager + this.divideComponentManager = new DivideComponentManager( + this, knowledgeBase, pauseRspEngineStreamsOnContextChanges); + + // register successful initialization + this.initialized = true; + } + + /** + * Loads the ontology that needs to be used as input (TBox) for each query + * derivation performed by the query deriver of this engine. 
+ * + * If an ontology has been loaded successfully in the past at least once, + * this method will reload the ontology based on the new input. If something + * goes wrong during this reloading and an exception is thrown, the query + * deriver should still be in a valid state and continue working with the + * latest successfully loaded ontology. + * + * @throws DivideInitializationException if something goes wrong during the ontology + * loading process, which prevents the DIVIDE engine + * from functioning as it should + * @throws DivideInvalidInputException when the specified ontology contains invalid + * statements, i.e., statements which cannot be loaded + * by the query deriver + */ + synchronized void loadOntology(Model divideOntologyModel) + throws DivideInvalidInputException, DivideInitializationException { + LOGGER.info("Loading ontology..."); + LOGGER.debug(LogConstants.METRIC_MARKER, "LOAD_ONTOLOGY_START"); + + long start = System.currentTimeMillis(); + + // load ontology into DIVIDE query deriver + divideQueryDeriver.loadOntology(divideOntologyModel); + + // convert ontology to a set of rules + // (to be used by the context enrichers) + OWLOntology divideOntology = + JenaOwlApiUtilities.getOWLOntology(divideOntologyModel); + List divideOntologyRules = + JenaRuleUtilities.convertOntologyToRulesList(divideOntology); + + // save ontology model and rules to engine + this.divideOntology = new DivideOntology(divideOntologyModel, divideOntologyRules); + + // update context enrichers for all components registered to engine + if (divideComponentManager != null) { + for (IComponent component : divideComponentManager.getRegisteredComponents()) { + enqueueContextEnricherUpdaterTask(component); + } + } + + LOGGER.debug(LogConstants.METRIC_MARKER, "LOAD_ONTOLOGY_END"); + LOGGER.info("Finished loading ontology in {} ms", System.currentTimeMillis() - start); + } + + @Override + public IDivideQuery addDivideQuery(String name, + String queryPattern, + String 
sensorQueryRule, + String goal, + ContextEnrichment contextEnrichment) throws + DivideNotInitializedException, DivideQueryDeriverException, DivideInvalidInputException { + LOGGER.info("Adding DIVIDE query with name '{}'...", name); + + if (!initialized) { + throw new DivideNotInitializedException(); + } + + // ensure DIVIDE query with that name does not yet exist + if (divideQueryMap.containsKey(name)) { + LOGGER.warn("Trying to add DIVIDE query with already existing name '{}'", name); + return null; + } + + // preprocess sensor query rule to avoid issues with overlapping variables + sensorQueryRule = preprocessSensorQueryRule(sensorQueryRule); + + // create DIVIDE query + IDivideQuery divideQuery = DivideQueryFactory.createInstance( + name, queryPattern, sensorQueryRule, goal, contextEnrichment); + + try { + // validate the defined context enrichment of the new DIVIDE query + validateContextEnrichment(divideQuery.getContextEnrichment()); + + // register DIVIDE query at query deriver + divideQueryDeriver.registerQuery(divideQuery, getQueryParser()); + + // keep track of DIVIDE query in map + synchronized (divideQueryMap) { + divideQueryMap.put(name, divideQuery); + } + + // start query derivation for this DIVIDE query only, + // for each component registered to the engine + for (IComponent component : divideComponentManager.getRegisteredComponents()) { + // to do so, first a context enricher should be created for this new query + // -> enqueue task to register new context enricher + enqueueContextEnricherUpdaterTask(component, divideQuery); + + // retrieve current context associated to component + Model componentContext = divideComponentManager. 
+ getContextAssociatedToComponent(component.getId()); + + // enqueue query derivation for new DIVIDE query if context of component + // exists and is non-empty + if (componentContext == null) { + LOGGER.info("No context available yet for component '{}' " + + "=> no query derivation for new DIVIDE query '{}' enqueued", + component.getId(), divideQuery.getName()); + } else if (componentContext.isEmpty()) { + LOGGER.info("Available context for component '{}' is empty " + + "=> no query derivation for new DIVIDE query '{}' enqueued", + component.getId(), divideQuery.getName()); + } else { + LOGGER.info("Context for component '{}' is available and non-empty " + + "=> query derivation for new DIVIDE query '{}' enqueued", + component.getId(), divideQuery.getName()); + enqueueSpecificDivideQueryDerivationTask( + component, new Context(componentContext), divideQuery); + } + } + + return divideQuery; + + } catch (DivideInvalidInputException e) { + LOGGER.warn("Something went wrong when registering the new DIVIDE query to " + + "because the given input is invalid", e); + throw e; + + } catch (DivideQueryDeriverException e) { + LOGGER.warn("Something went wrong when registering the new DIVIDE query to " + + "the query deriver - DIVIDE query is therefore NOT registered", e); + throw e; + } + } + + private void validateContextEnrichment(ContextEnrichment contextEnrichment) + throws DivideInvalidInputException { + // validate every individual query + try { + getQueryParser().validateDivideQueryContextEnrichment(contextEnrichment); + } catch (InvalidDivideQueryParserInputException e) { + throw new DivideInvalidInputException( + "DIVIDE query contains invalid context-enriching queries" + + (e.getMessage() != null ? 
": " + e.getMessage() : ""), e); + } + } + + @Override + public void removeDivideQuery(String name, + boolean unregisterQueries) throws DivideNotInitializedException { + LOGGER.info("Removing DIVIDE query with name '{}'...", name); + + if (!initialized) { + throw new DivideNotInitializedException(); + } + + IDivideQuery divideQuery; + synchronized (divideQueryMap) { + divideQuery = divideQueryMap.remove(name); + } + if (divideQuery != null) { + // unregister query at query deriver + divideQueryDeriver.unregisterQuery(divideQuery); + + // enqueue task to handle the removal of the DIVIDE query at this component + // -> context enricher for this query will be unregistered + // -> all RSP queries that originate from this DIVIDE query are unregistered + // on the registered components of the system (only if specified to do so) + for (IComponent component : divideComponentManager.getRegisteredComponents()) { + enqueueDivideQueryRemovalHandlingTask(component, divideQuery, unregisterQueries); + } + } + } + + @Override + public Collection getDivideQueries() throws DivideNotInitializedException { + if (!initialized) { + throw new DivideNotInitializedException(); + } + + return divideQueryMap.values(); + } + + @Override + public IDivideQuery getDivideQueryByName(String name) throws DivideNotInitializedException { + if (!initialized) { + throw new DivideNotInitializedException(); + } + + return divideQueryMap.get(name); + } + + @Override + public IComponent registerComponent(List contextIris, + RspQueryLanguage rspQueryLanguage, + String rspEngineUrl) + throws DivideNotInitializedException, DivideInvalidInputException { + LOGGER.info("Adding new DIVIDE component..."); + + if (!initialized) { + throw new DivideNotInitializedException(); + } + + // register new component at component manager + IComponent component = divideComponentManager.registerComponent( + contextIris, rspQueryLanguage, rspEngineUrl); + + // if component is not zero, prepare the engine for handling query 
update requests + if (component != null) { + // create a queue for this component where query updates requests can be put + final LinkedBlockingQueue queryUpdateQueue = + new LinkedBlockingQueue<>(); + componentQueryUpdateQueueMap.put(component.getId(), queryUpdateQueue); + + // create and start a thread for this component that continuously processes + // the query update queue + LOGGER.info("Starting new query update thread for component '{}'", component.getId()); + Thread queryUpdateThread = new Thread(() -> + processRspQueryUpdateQueue(component, queryUpdateQueue)); + queryUpdateThread.start(); + componentQueryUpdateThreadMap.put(component.getId(), queryUpdateThread); + + // enqueue a task to register a new context enricher associated to this component + // -> this will be done in parallel for the different existing DIVIDE queries + enqueueContextEnricherUpdaterTask(component); + + // check if context is available for component, and if so, enqueue first + // query derivation before registering observers + Model componentContext = divideComponentManager. 
+ getContextAssociatedToComponent(component.getId()); + if (componentContext == null) { + LOGGER.info("No context available yet for component '{}' " + + "=> no query derivation enqueued yet", component.getId()); + } else if (componentContext.isEmpty()) { + LOGGER.info("Available context for component '{}' is empty " + + "=> no query derivation enqueued yet", component.getId()); + } else { + LOGGER.info("Context for component '{}' is available and non-empty " + + "=> first query derivation enqueued", component.getId()); + enqueueGeneralDivideQueryDerivationTask(component, new Context(componentContext)); + } + + // register component as observer for all its context IRIs + component.getContextIris().forEach( + s -> divideComponentManager.addContextIriObserver(s, component)); + } + + return component; + } + + @Override + public void unregisterComponent(String id, + boolean unregisterQueries) throws DivideNotInitializedException { + LOGGER.info("Unregistering DIVIDE component with ID {}...", id); + + if (!initialized) { + throw new DivideNotInitializedException(); + } + + IComponent removed = divideComponentManager.unregisterComponent(id); + + // handle query update queue & thread if component ID exists and + // is actually removed + if (removed != null) { + // no longer keep track of query update queue + componentQueryUpdateQueueMap.remove(id); + + // interrupt the query update thread but still keep track of it so that + // it can be observed that this thread was interrupted + LOGGER.info("Interrupting query update thread of component {} because " + + "it is being removed", id); + Thread queryUpdateThread = componentQueryUpdateThreadMap.get(id); + queryUpdateThread.interrupt(); + + // also interrupt the RSP engine status update thread + if (pauseRspEngineStreamsOnContextChanges) { + removed.getRspEngineHandler().stopRspEngineStreamsUpdates(); + } + + // if specified, remove all queries registered on the RSP engine + // of this component by this DIVIDE engine + // 
(interrupting the query update thread will ensure no new + // registrations or unregistrations take place in this thread) + if (unregisterQueries) { + removed.getRspEngineHandler().unregisterAllQueries(); + } + } + } + + @Override + public Collection getRegisteredComponents() throws DivideNotInitializedException { + if (!initialized) { + throw new DivideNotInitializedException(); + } + + return divideComponentManager.getRegisteredComponents(); + } + + @Override + public IComponent getRegisteredComponentById(String id) throws DivideNotInitializedException { + if (!initialized) { + throw new DivideNotInitializedException(); + } + + return divideComponentManager.getRegisteredComponentById(id); + } + + @Override + public IDivideQueryParser getQueryParser() { + return DivideQueryParserFactory.getInstance( + processUnmappedVariableMatchesInParser, + validateUnboundVariablesInRspQlQueryBodyInParser); + } + + synchronized DivideOntology getDivideOntology() { + return divideOntology; + } + + /** + * Preprocesses the sensor query rule of a new DIVIDE query. + * For this preprocessing, the list of input variables in the consequence of the + * rule is retrieved, parsed, checked for validity, and modified. The modifying + * part consists of updating the order in which the input variables occur in the + * list: if any input variable contains another one, the longer one should be + * present first in the list, to avoid later substitution errors during the query + * substitution process. 
+ * + * @param sensorQueryRule sensor query rule to be preprocessed + * @return preprocessed sensor query rule + * @throws DivideInvalidInputException if definition of input variables in sensor + * query rule is invalid + */ + private String preprocessSensorQueryRule(String sensorQueryRule) + throws DivideInvalidInputException { + Matcher m1 = INPUT_VARIABLE_LIST_PATTERN.matcher(sensorQueryRule); + if (m1.find()) { + Matcher m2 = INPUT_VARIABLE_DEFINITION_PATTERN.matcher(m1.group()); + Map inputVariablesMap = new HashMap<>(); + while(m2.find()) { + Matcher m3 = INPUT_VARIABLE_NAME_PATTERN.matcher(m2.group()); + if (m3.find()) { + inputVariablesMap.put(m3.group(1), m2.group()); + } else { + throw new DivideInvalidInputException( + "Sensor query rule of DIVIDE query does not " + + "contain a valid definition of the DIVIDE input variables"); + } + } + + // sort input variable names occurring in list + // -> construct new RDF list of lists (for input variables) based on sorted names + String sortedInputVariables = inputVariablesMap.keySet() + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 1 : s1.compareTo(s2))) + .map(inputVariablesMap::get) + .collect(Collectors.joining(" ")); + String replacement = String.format("%s (%s)", m1.group(1), sortedInputVariables); + + return sensorQueryRule.replaceFirst(Pattern.quote(m1.group()), replacement); + + } else { + throw new DivideInvalidInputException("Sensor query rule of DIVIDE query does not " + + "contain a valid definition of the DIVIDE input variables"); + } + } + + /** + * Adds a task to this component's queue to update the RSP queries for + * this {@link IComponent}, with the specified context as input for the + * query derivation. + * This method will be called by the {@link DivideComponentManager} when + * knowledge base changes to the context relevant for the given component + * are observed. 
+ * + * @param component {@link IComponent} for which the RSP queries should be updated + * @param context context model to use as input for the query derivation, i.e., + * the relevant context of the context IRIs associated to the given + * {@link IComponent} instance + */ + void enqueueGeneralDivideQueryDerivationTask(IComponent component, Context context) { + try { + LOGGER.info("Enqueueing general DIVIDE query derivation task for component " + + "with ID '{}' and context ID '{}'", + component.getId(), context.getId()); + + // retrieve component's query update request queue + LinkedBlockingQueue queue = + componentQueryUpdateQueueMap.get(component.getId()); + + // if there is still any update task waiting in the queue, it can be + // removed since the context has again been updated meanwhile + // -> for the same tasks as these: it would not make sense to first + // do the update with the old context and then after that with the + // new context; better immediately do it with the new context) + // -> for a task to unregister all queries associated to a removed + // DIVIDE query: since the new query derivation will no longer do + // the query derivation for this removed DIVIDE query, an update + // of the queries after the derivation will automatically result in + // the removal of all queries associated to this removed DIVIDE + // query (since no associated queries will end up in the list of + // new queries) + // -> for a task to register new queries associated to a new DIVIDE + // query: since the new query derivation task will involve the query + // derivation for all registered DIVIDE queries, it will automatically + // also include the derivation of this new DIVIDE query + queue.clear(); + + // enqueue query update request with newest context in the component's queue + queue.put(new GeneralDivideQueryDerivationTask(component, context)); + + // restart query update thread if needed + restartQueryUpdateThreadIfNeeded(component, queue); + + } catch 
(InterruptedException ignored) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Enqueueing general DIVIDE query derivation task for component with ID '{}'" + + "and context ID '{}' resulted in unexpected InterruptedException", + component.getId(), context.getId()); + + // retry if interrupted while waiting (but the queue is not bounded + // so normally the queue put operation should not block) + enqueueGeneralDivideQueryDerivationTask(component, context); + } + } + + /** + * Adds a task to this component's queue to update the RSP queries associated + * to the given {@link IDivideQuery} for this {@link IComponent}, with the + * specified context as input for the query derivation. + * This method will be called upon the registration of a new DIVIDE query. + * + * @param component {@link IComponent} for which the RSP queries should be updated + * @param context context model to use as input for the query derivation, i.e., + * the relevant context of the context IRIs associated to the given + * {@link IComponent} instance + * @param divideQuery DIVIDE query for which the query derivation task should + * be enqueued + */ + void enqueueSpecificDivideQueryDerivationTask(IComponent component, + Context context, + IDivideQuery divideQuery) { + try { + LOGGER.info("Enqueueing specific DIVIDE query derivation task for DIVIDE " + + "query '{}' for component " + + "with ID '{}' and context ID '{}'", + divideQuery.getName(), component.getId(), context.getId()); + + // retrieve component's query update request queue + LinkedBlockingQueue queue = + componentQueryUpdateQueueMap.get(component.getId()); + + // enqueue query update request with newest context in the component's queue + queue.put(new SpecificDivideQueryDerivationTask( + component, context, divideQuery)); + + // restart query update thread if needed + restartQueryUpdateThreadIfNeeded(component, queue); + + } catch (InterruptedException ignored) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Enqueueing 
specific DIVIDE query derivation task for DIVIDE query '{}'," + + "component with ID '{}' and context ID '{}' resulted " + + "in unexpected InterruptedException", + divideQuery.getName(), component.getId(), context.getId()); + + // retry if interrupted while waiting (but the queue is not bounded + // so normally the queue put operation should not block) + enqueueSpecificDivideQueryDerivationTask(component, context, divideQuery); + } + } + + /** + * Adds a task to this component's queue to unregister the RSP queries for + * this {@link IComponent} that are currently registered via the DIVIDE query + * derivation of the specified DIVIDE query. + * This method will be called by this engine when a DIVIDE query is unregistered + * from the DIVIDE engine. + * + * @param component {@link IComponent} for which the RSP queries should be unregistered + * @param divideQuery DIVIDE query of which the associated RSP engine queries need to be + * unregistered from the wrapped RSP engine + */ + private void enqueueDivideQueryRemovalHandlingTask(IComponent component, + IDivideQuery divideQuery, + boolean unregisterQueries) { + try { + LOGGER.info("Enqueueing DIVIDE query removal handling task for component " + + "with ID '{}' and DIVIDE query '{}'", + component.getId(), divideQuery.getName()); + + // retrieve component's query update request queue + LinkedBlockingQueue queue = + componentQueryUpdateQueueMap.get(component.getId()); + + // enqueue removal handling task + // IMPORTANT: the queue is not cleared in this case, since this involves + // no query derivation of the remaining DIVIDE queries which + // is therefore a task that cannot be ignored + queue.put(new DivideQueryRemovalHandlingTask( + component, divideQuery, unregisterQueries)); + + // restart query update thread if needed + restartQueryUpdateThreadIfNeeded(component, queue); + + } catch (InterruptedException ignored) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Enqueueing DIVIDE query removal handling task 
for component with ID '{}' " + + "and DIVIDE query '{}' resulted in unexpected InterruptedException", + component.getId(), divideQuery.getName()); + + // retry if interrupted while waiting (but the queue is not bounded + // so normally the queue put operation should not block) + enqueueDivideQueryRemovalHandlingTask(component, divideQuery, unregisterQueries); + } + } + + private void enqueueContextEnricherUpdaterTask(IComponent component, + IDivideQuery divideQuery) { + try { + LOGGER.info("Enqueueing task to update context enrichers for DIVIDE " + + "query '{}' for component with ID '{}'", + divideQuery.getName(), component.getId()); + + // retrieve component's query update request queue + LinkedBlockingQueue queue = + componentQueryUpdateQueueMap.get(component.getId()); + + // enqueue query update request with newest context in the component's queue + queue.put(new ContextEnricherUpdaterTask(component, divideQuery)); + + // restart query update thread if needed + restartQueryUpdateThreadIfNeeded(component, queue); + + } catch (InterruptedException ignored) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Enqueueing task to update context enrichers for DIVIDE query '{}' for " + + "component with ID '{}' resulted in unexpected InterruptedException", + divideQuery.getName(), component.getId()); + + // retry if interrupted while waiting (but the queue is not bounded + // so normally the queue put operation should not block) + enqueueContextEnricherUpdaterTask(component, divideQuery); + } + } + + private void enqueueContextEnricherUpdaterTask(IComponent component) { + try { + LOGGER.info("Enqueueing task to update context enrichers for all DIVIDE queries " + + "on component with ID '{}'", component.getId()); + + // retrieve component's query update request queue + LinkedBlockingQueue queue = + componentQueryUpdateQueueMap.get(component.getId()); + + // enqueue query update request with newest context in the component's queue + queue.put(new 
ContextEnricherUpdaterTask(component)); + + // restart query update thread if needed + restartQueryUpdateThreadIfNeeded(component, queue); + + } catch (InterruptedException ignored) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Enqueueing task to update context enrichers for all DIVIDE queries on " + + "component with ID '{}' resulted in unexpected InterruptedException", + component.getId()); + + // retry if interrupted while waiting (but the queue is not bounded + // so normally the queue put operation should not block) + enqueueContextEnricherUpdaterTask(component); + } + } + + private void restartQueryUpdateThreadIfNeeded(IComponent component, + LinkedBlockingQueue queue) { + // check if thread that is processing query updates is interrupted + // (in that case it will have been removed from the query update thread map), + // and if so, create & start new thread for this + Thread queryUpdateThread = + componentQueryUpdateThreadMap.get(component.getId()); + if (queryUpdateThread == null) { + LOGGER.info("Query update thread for component with ID '{}' has been " + + "interrupted, so a new thread is started", + component.getId()); + + Thread newQueryUpdateThread = new Thread(() -> + processRspQueryUpdateQueue(component, queue)); + newQueryUpdateThread.start(); + componentQueryUpdateThreadMap.put(component.getId(), newQueryUpdateThread); + } + } + + private void processRspQueryUpdateQueue(IComponent component, + LinkedBlockingQueue queryUpdateQueue) { + try { + boolean interrupted = false; + while (!interrupted) { + // retrieve the updated context from the queue - blocks if the + // queue is empty until an item again enters the queue + IDivideQueryUpdateTask queryUpdateTask = queryUpdateQueue.take(); + + // launch the query derivation for the given component & context + interrupted = queryUpdateTask.execute(); + } + + // thread is interrupted explicitly by the system, probably because + // the component is unregistered + LOGGER.info("Query update thread for 
component '{}' is found interrupted after query" + + " update, so is stopping with the processing of the query update queue", + component.getId()); + + } catch (InterruptedException e) { + LOGGER.info("Query update thread for component '{}' is interrupted while waiting," + + " so is stopping with the processing of the query update queue", + component.getId()); + } + + // remove thread from query update thread map so that the engine + // knows a new thread should be started upon arrival of a new + // query update request + // (after exiting this method, the thread status will become TERMINATED) + componentQueryUpdateThreadMap.remove(component.getId()); + } + + private class GeneralDivideQueryDerivationTask implements IDivideQueryUpdateTask { + + private final Logger LOGGER = LoggerFactory.getLogger( + GeneralDivideQueryDerivationTask.class.getName()); + + private final IComponent component; + private final Context context; + + GeneralDivideQueryDerivationTask(IComponent component, Context context) { + this.component = component; + this.context = context; + } + + @Override + public boolean execute() { + LOGGER.info("Preparing DIVIDE query derivation for component with ID '{}' " + + "and context '{}' in RSP query update thread", + component.getId(), context.getId()); + + // retrieve list of queries + Collection divideQueries; + synchronized (divideQueryMap) { + divideQueries = divideQueryMap.values(); + } + + // stop if list of queries is empty + if (divideQueries.isEmpty()) { + LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " + + "no DIVIDE queries registered, so finishing task", + component.getId(), context.getId()); + return Thread.currentThread().isInterrupted(); + } + + // run the query derivation scripts in parallel for every DIVIDE query, + // each on a dedicated thread in the worker thread pool + CountDownLatch latch = new CountDownLatch(divideQueries.size()); + DivideOntology ontology = getDivideOntology(); + for 
(IDivideQuery divideQuery : divideQueries) { + workerThreadPool.submit(new SingleQueryDeriver( + divideQuery, context, component, + divideQueryDeriver, ontology, latch)); + } + + // keep track of whether the thread gets interrupted while waiting for + // the other threads to finish + boolean interruptedWhileWaiting = false; + + // wait until the query derivation threads have all finished + boolean queryDerivationThreadsFinished = false; + while (!queryDerivationThreadsFinished) { + try { + LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " + + "waiting for other threads to finish the individual query derivations" + + " for the different DIVIDE queries", + component.getId(), context.getId()); + + // wait for the latch to be decremented by the query derivation threads + latch.await(); + + // if the previous call returns, this means that all threads have + // finished (since they all count down the latch when finished) + queryDerivationThreadsFinished = true; + + } catch (InterruptedException e) { + // interrupts of this thread should be ignored, since it is really + // required to await the latch being count down to zero + // (and only handle interrupt requests at the end of this method) + LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " + + "query update thread interrupted while waiting for other threads", + component.getId(), context.getId()); + + // it is however important to remember that this interruption happened + // while waiting + interruptedWhileWaiting = true; + } + } + + // check if thread has been interrupted up to this point + // -> if so, no query registration update should take place + // (in normal circumstances this only happens if the component is + // unregistered from the engine) + if (interruptedWhileWaiting || Thread.currentThread().isInterrupted()) { + LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " + + "not registering queries since query 
update thread has been interrupted", + component.getId(), context.getId()); + + // clearing registration schedule since no queries will be registered anymore + component.getRspEngineHandler().clearRegistrationSchedule(); + + // check if there already is a new general DIVIDE query derivation task + // in the queue at this point, AND the RSP engine streams are paused + // on context changes + // -> if yes, no query registration update should take place (since the + // RSP engine streams are paused, and it only makes sense to update + // them after the last general DIVIDE query derivation task in the + // queue has completed (since they are derived from the most recent + // up-to-date context) + } else if (pauseRspEngineStreamsOnContextChanges && + componentQueryUpdateQueueMap.get(component.getId()).stream().anyMatch( + queryUpdateTask -> queryUpdateTask instanceof GeneralDivideQueryDerivationTask)) { + LOGGER.info("DIVIDE query derivation for component with ID '{}' and context '{}': " + + "not registering queries since query update queue contains new " + + "general DIVIDE query derivation task", + component.getId(), context.getId()); + + // clearing registration schedule since no queries will be registered anymore + component.getRspEngineHandler().clearRegistrationSchedule(); + + } else { + // update query registration at actual RSP engine + component.getRspEngineHandler().updateRegistration(); + } + + // in any case, it is still important to try restarting the engine + // (if being interrupted explicitly because the component is being + // removed, then all updates will be stopped explicitly anyway) + // -> and if no restart is allowed because of new pause requests + if (pauseRspEngineStreamsOnContextChanges) { + component.getRspEngineHandler().restartRspEngineStreams(); + } + + LOGGER.info("Finished DIVIDE query derivation for component with ID '{}' and context '{}'", + component.getId(), context.getId()); + + return interruptedWhileWaiting || 
Thread.currentThread().isInterrupted(); + } + } + + private class SpecificDivideQueryDerivationTask implements IDivideQueryUpdateTask { + + private final Logger LOGGER = LoggerFactory.getLogger( + SpecificDivideQueryDerivationTask.class.getName()); + + private final IComponent component; + private final Context context; + private final IDivideQuery divideQuery; + + SpecificDivideQueryDerivationTask(IComponent component, + Context context, + IDivideQuery divideQuery) { + this.component = component; + this.context = context; + this.divideQuery = divideQuery; + } + + @Override + public boolean execute() { + LOGGER.info("Preparing specific DIVIDE query derivation for DIVIDE query '{}', " + + "for component with ID '{}' " + + "and context '{}' in RSP query update thread", + divideQuery.getName(), component.getId(), context.getId()); + + // run the query derivation script in for the given DIVIDE query, + // on a dedicated thread in the worker thread pool + CountDownLatch latch = new CountDownLatch(1); + workerThreadPool.submit(new SingleQueryDeriver( + divideQuery, context, component, + divideQueryDeriver, getDivideOntology(), latch)); + + // keep track of whether the thread gets interrupted while waiting for + // the other threads to finish + boolean interruptedWhileWaiting = false; + + // wait until the query derivation threads have all finished + boolean queryDerivationThreadsFinished = false; + while (!queryDerivationThreadsFinished) { + try { + LOGGER.info("Specific DIVIDE query derivation for DIVIDE query '{}', " + + "for component with ID '{}' and context '{}': " + + "waiting for other thread to finish the individual query derivation", + divideQuery.getName(), component.getId(), context.getId()); + + // wait for the latch to be decremented by the the query derivation threads + latch.await(); + + // if the previous call returns, this means that the thread has finished + queryDerivationThreadsFinished = true; + + } catch (InterruptedException e) { + // interrupts of 
this thread should be ignored, since it is really + // required to await the latch being count down to zero + // (and only handle interrupt requests at the end of this method) + LOGGER.info("Specific DIVIDE query derivation for DIVIDE query '{}', " + + "for component with ID '{}' and context '{}': " + + "query update thread interrupted while waiting for other threads", + divideQuery.getName(), component.getId(), context.getId()); + + // it is however important to remember that this interruption happened + // while waiting + interruptedWhileWaiting = true; + } + } + + if (!interruptedWhileWaiting && !Thread.currentThread().isInterrupted()) { + // update query registration at actual RSP engine for this DIVIDE query + component.getRspEngineHandler().updateRegistration(divideQuery); + } else { + LOGGER.info("Specific DIVIDE query derivation for DIVIDE query '{}', " + + "for component with ID '{}' and context '{}': " + + "not registering queries since query update thread has been interrupted", + divideQuery.getName(), component.getId(), context.getId()); + + // clearing registration schedule since no queries will be registered anymore + component.getRspEngineHandler().clearRegistrationSchedule(divideQuery); + } + + LOGGER.info("Finished DIVIDE query derivation for component with ID '{}' and context '{}'", + component.getId(), context.getId()); + + return interruptedWhileWaiting || Thread.currentThread().isInterrupted(); + } + } + + private static class DivideQueryRemovalHandlingTask implements IDivideQueryUpdateTask { + + private static final Logger LOGGER = LoggerFactory.getLogger( + DivideQueryRemovalHandlingTask.class.getName()); + + private final IComponent component; + private final IDivideQuery divideQuery; + private final boolean unregisterQueries; + + DivideQueryRemovalHandlingTask(IComponent component, + IDivideQuery divideQuery, + boolean unregisterQueries) { + this.component = component; + this.divideQuery = divideQuery; + this.unregisterQueries = 
unregisterQueries; + } + + @Override + public boolean execute() { + LOGGER.info("Handling the removal of DIVIDE query '{}' at component with ID '{}':" + + "unregistering context enrichers{}", + divideQuery.getName(), component.getId(), + unregisterQueries ? " and starting the unregistering of all queries" : ""); + + if (!Thread.currentThread().isInterrupted()) { + // unregister context enricher for the given component and DIVIDE query + component.unregisterContextEnricher(divideQuery); + + if (unregisterQueries) { + // update query registration at actual RSP engine + component.getRspEngineHandler(). + unregisterAllQueriesOriginatingFromDivideQuery(divideQuery); + } + } else { + LOGGER.info("Unregistering of all queries at {} associated to " + + "removed DIVIDE query {} has been interrupted", + component.getId(), divideQuery.getName()); + } + + LOGGER.info("Finished unregistering of all queries at {} associated to " + + "removed DIVIDE query {}", + component.getId(), divideQuery.getName()); + + return Thread.currentThread().isInterrupted(); + } + + } + + private class ContextEnricherUpdaterTask implements IDivideQueryUpdateTask { + + private final Logger LOGGER = LoggerFactory.getLogger( + ContextEnricherUpdaterTask.class.getName()); + + private final IComponent component; + private final Collection divideQueries; + + ContextEnricherUpdaterTask(IComponent component, + IDivideQuery divideQuery) { + this.component = component; + this.divideQueries = Collections.singletonList(divideQuery); + } + + ContextEnricherUpdaterTask(IComponent component) { + this.component = component; + this.divideQueries = new ArrayList<>(); + } + + @Override + public boolean execute() { + // retrieve list of all DIVIDE queries if no set of queries is specified for this task + if (divideQueries.isEmpty()) { + synchronized (divideQueryMap) { + divideQueries.addAll(divideQueryMap.values()); + } + } + + // stop if list of queries is empty + if (divideQueries.isEmpty()) { + LOGGER.info("Task to 
update context enrichers for component with ID '{}': " + + "stopped, since no DIVIDE queries are registered", + component.getId()); + return Thread.currentThread().isInterrupted(); + } + + LOGGER.info("Starting task to update context enrichers for component with ID '{}' " + + "and following queries: {}", + component.getId(), + divideQueries.stream().map(IDivideQuery::getName).collect(Collectors.joining(", "))); + long start = System.currentTimeMillis(); + + // create the context enricher for each given DIVIDE query, + // on a dedicated thread in the worker thread pool + CountDownLatch latch = new CountDownLatch(divideQueries.size()); + for (IDivideQuery divideQuery : divideQueries) { + workerThreadPool.submit(new SingleContextEnricherUpdater( + component, + divideQuery, + getDivideOntology(), + latch)); + } + + // keep track of whether the thread gets interrupted while waiting for + // the other threads to finish + boolean interruptedWhileWaiting = false; + + // wait until the context enricher creation threads have all finished + boolean threadsFinished = false; + while (!threadsFinished) { + try { + LOGGER.info("Waiting for threads to finish parallel updating of context enricher " + + "for component with ID '{}' for {} DIVIDE queries", + component.getId(), divideQueries.size()); + + // wait for the latch to be decremented by the different threads + // -> after this method call completes, all parallel tasks are finished + latch.await(); + + // if the previous call returns, this means that the thread has finished + threadsFinished = true; + + } catch (InterruptedException e) { + // interrupts of this thread should be ignored, since it is really + // required to await the latch being count down to zero + // (and only handle interrupt requests at the end of this method) + LOGGER.info("Parallel updating of context enricher for component with ID '{}': " + + "query update thread interrupted while waiting for other threads", + component.getId()); + + // it is however 
important to remember that this interruption happened + // while waiting + interruptedWhileWaiting = true; + } + } + + LOGGER.info("Finished updating context enrichers for component with ID '{}' and " + + "queries {} in {} ms", + component.getId(), + divideQueries.stream().map(IDivideQuery::getName).collect(Collectors.toList()), + System.currentTimeMillis() - start); + + return interruptedWhileWaiting || Thread.currentThread().isInterrupted(); + } + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngineFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngineFactory.java new file mode 100644 index 0000000..eb30fb6 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideEngineFactory.java @@ -0,0 +1,14 @@ +package be.ugent.idlab.divide.core.engine; + +public class DivideEngineFactory { + + /** + * Create and return a new DIVIDE engine. 
+ * + * @return newly created DIVIDE engine + */ + public static synchronized IDivideEngine createInstance() { + return new DivideEngine(); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideOntology.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideOntology.java new file mode 100644 index 0000000..3f36218 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/DivideOntology.java @@ -0,0 +1,33 @@ +package be.ugent.idlab.divide.core.engine; + +import org.apache.jena.rdf.model.Model; +import org.apache.jena.reasoner.rulesys.Rule; + +import java.util.List; +import java.util.UUID; + +public class DivideOntology { + + private final String id; + private final Model model; + private final List rules; + + public DivideOntology(Model model, List rules) { + this.id = UUID.randomUUID().toString(); + this.model = model; + this.rules = rules; + } + + public String getId() { + return id; + } + + public Model getModel() { + return model; + } + + public List getRules() { + return rules; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideEngine.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideEngine.java new file mode 100644 index 0000000..55a9ca1 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideEngine.java @@ -0,0 +1,231 @@ +package be.ugent.idlab.divide.core.engine; + +import be.ugent.idlab.divide.core.component.IComponent; +import be.ugent.idlab.divide.core.context.ContextEnrichment; +import be.ugent.idlab.divide.core.exception.DivideInitializationException; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import 
be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser; +import be.ugent.idlab.divide.rsp.RspQueryLanguage; +import be.ugent.idlab.kb.IKnowledgeBase; +import org.apache.jena.rdf.model.Model; + +import java.util.Collection; +import java.util.List; + +/** + * Main engine of DIVIDE. + * + * Takes care of the handling of DIVIDE queries, and contains references to + * instances of {@link IDivideQueryDeriver} for the query derivation. + * + * It also is responsible for managing the components within the DIVIDE system. + * It keeps track of a collection of registered {@link IComponent} instances, + * and provides methods to register, unregister and retrieve them. + * It registers itself as observer to the {@link IKnowledgeBase} of + * this engine, to trigger the query derivation for the relevant components + * on context updates. + */ +public interface IDivideEngine { + + /** + * Initializes the DIVIDE engine. + * Keeps track of the given {@link IDivideQueryDeriver} to be used for the + * query derivation when relevant knowledge base changes are observed. + * Changes to the knowledge base are only observed after this initialization + * method has successfully returned. + * Loads the ontology to the engine based on the list of ontology files. + * Creates a DIVIDE component manager to manage the DIVIDE components of this + * engine, and observe changes of the given knowledge base. 
+ * + * @param divideQueryDeriver {@link IDivideQueryDeriver} used for the query derivation + * performed by this engine + * @param knowledgeBase {@link IKnowledgeBase} that should be observed for changes + * to know when the query derivation should be triggered + * @param divideOntology model representing all statements in the ontology that is used + * by DIVIDE for the query derivation, i.e., in the TBox of the + * knowledge base + * @param pauseRspEngineStreamsOnContextChanges boolean representing whether RSP engine + * streams on a component should be paused + * when context changes are detected that + * trigger the DIVIDE query derivation for + * that component + * @param processUnmappedVariableMatchesInParser boolean representing whether variable + * matches in the input for the DIVIDE query + * parser that are not defined as mappings, + * should be considered as mappings by default + * @param validateUnboundVariablesInRspQlQueryBodyInParser boolean representing whether variables + * in the RSP-QL query body generated by + * the DIVIDE query parser, should be + * validated (= checked for occurrence in + * the WHERE clause of the query or in the + * set of input variables that will be + * substituted during the DIVIDE query + * derivation) during parsing + * @throws DivideInitializationException if something goes wrong during the initialization + * process, which prevents the DIVIDE engine from + * functioning as it should + * @throws DivideInvalidInputException when the specified ontology contains invalid + * statements, i.e., statements which cannot be loaded + * by the query deriver + */ + void initialize(IDivideQueryDeriver divideQueryDeriver, + IKnowledgeBase knowledgeBase, + Model divideOntology, + boolean pauseRspEngineStreamsOnContextChanges, + boolean processUnmappedVariableMatchesInParser, + boolean validateUnboundVariablesInRspQlQueryBodyInParser) + throws DivideInitializationException, DivideInvalidInputException; + + /** + * Register a new DIVIDE 
query to this DIVIDE engine. + * The required format and language of the input parameters depend on the type + * of query deriver used - if any of the input parameters is invalid according + * to this query deriver, a {@link DivideInvalidInputException} will be thrown. + * + * @param name name of the new DIVIDE query + * @param queryPattern generic RSP-QL query pattern of this query + * @param sensorQueryRule sensor query rule to be used for the query derivation + * @param goal goal to be used for the query derivation + * @param contextEnrichment context enrichment to be applied at the start of the + * query derivation for this query + * @return the newly created {@link IDivideQuery} that is registered to + * the DIVIDE engine (or null if a DIVIDE query with the given name + * is already registered to the engine) + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + * @throws DivideQueryDeriverException when something goes wrong when registering the new + * DIVIDE query to the {@link IDivideQueryDeriver} of this + * engine, which prevents it from performing the query + * derivation for this query - this error has nothing to + * do with invalid parameters + * @throws DivideInvalidInputException when the registration fails because any of the new + * DIVIDE query parameters is invalid + */ + IDivideQuery addDivideQuery(String name, + String queryPattern, + String sensorQueryRule, + String goal, + ContextEnrichment contextEnrichment) + throws DivideNotInitializedException, DivideQueryDeriverException, DivideInvalidInputException; + + /** + * Removes an {@link IDivideQuery} with the given name from the list + * of queries registered to this DIVIDE engine. 
+ * + * @param name name of query to remove from the DIVIDE engine (if no query + * with the given name is registered, nothing is done) + * @param unregisterQueries specifies whether all queries associated to this + * DIVIDE query should be unregistered on the components + * currently known by the system + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + */ + void removeDivideQuery(String name, + boolean unregisterQueries) throws DivideNotInitializedException; + + /** + * Retrieve list of {@link IDivideQuery} instances registered to this + * DIVIDE engine. + * + * @return list of DIVIDE queries registered to the engine + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + */ + Collection getDivideQueries() throws DivideNotInitializedException; + + /** + * Retrieve {@link IDivideQuery} with the given name that is registered + * to this DIVIDE engine. + * + * @param name name of the DIVIDE query to retrieve + * @return the {@link IDivideQuery} registered to the DIVIDE engine with the + * given name (null if no query with that name is registered) + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + */ + IDivideQuery getDivideQueryByName(String name) throws DivideNotInitializedException; + + /** + * Creates and registers a new {@link IComponent}. + * After successful registration (no exception is thrown or null is returned), + * this {@link IDivideEngine} performs the following task: when a + * change to any of the ABox IRIs specified as mainContextIri or + * additionalContextIris is observed, the query derivation at the associated + * {@link IDivideEngine} is triggered for this registered component. 
 + * + * @param contextIris IRIs of the ABoxes in a knowledge base that represents the + * context associated to the new {@link IComponent} + * @param rspQueryLanguage RSP query language used by the RSP engine running on + * the created component + * @param rspEngineUrl URL which should be used for communicating with the RSP engine + * running on the created component, and which will also be mapped + * to a unique ID for the created component + * @return the new {@link IComponent} that is registered (or null if a component + * is already registered with the specified rspEngineUrl) + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + * @throws DivideInvalidInputException if any of the specified context IRIs is not valid + * (i.e., any entry of the contextIris + * list), OR if the + * rspEngineUrl is not a valid URL + */ + IComponent registerComponent(List contextIris, + RspQueryLanguage rspQueryLanguage, + String rspEngineUrl) + throws DivideNotInitializedException, DivideInvalidInputException; + + /** + * Unregisters an {@link IComponent} with the given ID. + * After successful completion of this method (no null is returned), changes to + * the ABox IRIs in the knowledge base specified as context IRIs of this + * component no longer result in triggering the query derivation process for + * this component. 
+ * + * @param id ID of the component to unregister (if no component with the given + * ID is registered, nothing is done) + * @param unregisterQueries specifies whether all queries registered by DIVIDE on + * the RSP engine of this component should be unregistered + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + */ + void unregisterComponent(String id, + boolean unregisterQueries) throws DivideNotInitializedException; + + /** + * Retrieve all {@link IComponent} instances registered to this manager. + * + * @return all registered {@link IComponent} instances + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + */ + Collection getRegisteredComponents() throws DivideNotInitializedException; + + /** + * Retrieve {@link IComponent} with the given ID that is registered + * to this manager. + * + * @param id ID of the {@link IComponent} to retrieve + * @return the {@link IComponent} registered to this manager with the + * given ID (null if no component with that ID is registered) + * @throws DivideNotInitializedException if {@link #initialize(IDivideQueryDeriver, + * IKnowledgeBase, Model, boolean, boolean, boolean)} + * has not been called yet + */ + IComponent getRegisteredComponentById(String id) throws DivideNotInitializedException; + + /** + * Retrieve the {@link IDivideQueryParser} of this DIVIDE engine. 
+ * + * @return the DIVIDE query parser of this DIVIDE engine + */ + IDivideQueryParser getQueryParser(); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriver.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriver.java new file mode 100644 index 0000000..ee9b3de --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriver.java @@ -0,0 +1,128 @@ +package be.ugent.idlab.divide.core.engine; + +import be.ugent.idlab.divide.core.context.Context; +import be.ugent.idlab.divide.core.exception.DivideInitializationException; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser; +import org.apache.jena.rdf.model.Model; + +/** + * Class responsible for deriving the actual RSP-QL queries from an {@link IDivideQuery}. + */ +public interface IDivideQueryDeriver { + + /** + * Loads the ontology that needs to be used as input (TBox) for each query + * derivation performed by this query deriver. + * + * If an ontology has been loaded successfully in the past at least once, + * this method will reload the ontology based on the new input. If something + * goes wrong during this reloading and an exception is thrown, the query + * deriver should still be in a valid state and continue working with the + * latest successfully loaded ontology. 
+ * + * @param ontology representation of the ontology that should be used as TBox + * during the DIVIDE query derivation + * @throws DivideInvalidInputException when the ontology contains invalid statements, i.e., + * statements which cannot be loaded by the query deriver + * @throws DivideInitializationException when something goes wrong during the loading of the + * ontology files, which prevents the instance from + * performing the query derivation + */ + void loadOntology(Model ontology) + throws DivideInvalidInputException, DivideInitializationException; + + /** + * Register a new {@link IDivideQuery} to this query deriver, to prepare + * the engine for deriving the RSP-QL queries from this DIVIDE query. + * + * @param divideQuery {@link IDivideQuery} to be registered to this query deriver + * (if null or if a DIVIDE query with the given name is already + * registered to the engine, nothing is changed) + * @param queryParser {@link IDivideQueryParser} to be used when the query deriver wants + * to parse the context-enriching queries of the DIVIDE query to possibly + * manipulate the context enrichment + * @throws DivideQueryDeriverException when something goes wrong during the registration + * of the new DIVIDE query, which prevents the instance + * from performing the query derivation for this query + * @throws DivideInvalidInputException when the given DIVIDE query has invalid fields + */ + void registerQuery(IDivideQuery divideQuery, + IDivideQueryParser queryParser) + throws DivideQueryDeriverException, DivideInvalidInputException; + + /** + * Unregister a {@link IDivideQuery} from this query deriver. + * In this way, this query deriver knows it will no longer need to derive + * RSP-QL queries from this DIVIDE query, which means it can clean up any + * resources related to this DIVIDE query. 
+ * + * @param divideQuery {@link IDivideQuery} to be unregistered from this query deriver + * (if null or if no DIVIDE query with the given name is registered + * to the engine, nothing is changed) + */ + void unregisterQuery(IDivideQuery divideQuery); + + /** + * Performs the actual query derivation for the {@link IDivideQuery} with the given name, + * if such a DIVIDE query is registered to this query deriver. + * Runs the query derivation with the loaded ontology (TBox) and the passed context (ABox), + * outputting a query deriver result containing a list of RSP-QL queries that should be + * registered on the component with the passed ID given the new (passed) context. + * + * @param divideQueryName name of the {@link IDivideQuery} to be used for the query derivation + * (if no DIVIDE query with this name is registered, nothing is done and + * an empty list is returned) + * @param context new context for a certain component that should be used as input for + * the query derivation + * @param componentId ID of the component for which this query derivation is run + * @return a query deriver result, containing a method to retrieve a list of RSP-QL queries + * derived from the given DIVIDE query (can be of any length), + * which should be registered on the component with the passed ID + * @throws DivideQueryDeriverException when something goes wrong during the derivation of + * the RSP-QL queries + * @throws DivideNotInitializedException if {@link #loadOntology(Model)} has not been called yet + */ + IDivideQueryDeriverResult deriveQueries(String divideQueryName, + Context context, + String componentId) + throws DivideQueryDeriverException, DivideNotInitializedException; + + /** + * Substitutes new window parameters in a previous result of running the query derivation + * via the {@link #deriveQueries(String, Context, String)} method. + * These new window parameters can for example be imposed by a monitor component. 
 + * This method does not perform the actual query derivation for the {@link IDivideQuery} + * with the given name, but redoes the final part of the query derivation where the + * window parameters for this query are substituted in the derived queries. The window + * parameters that should be used are passed to this method. + * + * @param divideQueryName name of the {@link IDivideQuery} to be used for the query derivation + * (if no DIVIDE query with this name is registered, nothing is done and + * an empty list is returned) + * @param windowParameters description of the new window parameters for the stream(s) defined in + * the RSP-QL query body pattern of the given DIVIDE query (if window + * parameter variables occur in the query pattern that are not redefined + * by the monitor, the statically defined window parameters will be used + * instead) + * TODO: ensure all window parameters are redefined, or maybe + * rework this part so that first the thingy still looks at the + * dynamically defined window parameters + * @param componentId ID of the component for which this window parameter substitution is run + * @param lastResult result of the last query derivation or window parameter substitution + * for this DIVIDE query and component, into which the new window + * parameters are substituted + * @return a new query deriver result, containing a method to retrieve the list of updated + * RSP-QL queries with the new window parameters substituted into, + * which should be registered on the component with the passed ID + * @throws DivideQueryDeriverException when something goes wrong during the process of generating + * the new RSP-QL queries + * @throws DivideNotInitializedException if {@link #loadOntology(Model)} has not been called yet + */ + IDivideQueryDeriverResult substituteWindowParameters(String divideQueryName, + Model windowParameters, + String componentId, + IDivideQueryDeriverResult lastResult) + throws DivideQueryDeriverException, DivideNotInitializedException; + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriverResult.java 
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriverResult.java new file mode 100644 index 0000000..7ca14a5 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryDeriverResult.java @@ -0,0 +1,13 @@ +package be.ugent.idlab.divide.core.engine; + +import java.util.List; + +public interface IDivideQueryDeriverResult { + + /** + * @return a list of substituted RSP-QL queries being the result of performing + * the DIVIDE query derivation and/or window parameter substitution + */ + List getSubstitutedRspQlQueries(); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryUpdateTask.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryUpdateTask.java new file mode 100644 index 0000000..c6d5cd5 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/IDivideQueryUpdateTask.java @@ -0,0 +1,13 @@ +package be.ugent.idlab.divide.core.engine; + +public interface IDivideQueryUpdateTask { + + /** + * Execute this query update task. 
+ * + * @return true if the update thread on which this task is running was + * interrupted during the execution of this task, false otherwise + */ + boolean execute(); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleContextEnricherUpdater.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleContextEnricherUpdater.java new file mode 100644 index 0000000..3506486 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleContextEnricherUpdater.java @@ -0,0 +1,124 @@ +package be.ugent.idlab.divide.core.engine; + +import be.ugent.idlab.divide.core.component.IComponent; +import be.ugent.idlab.divide.core.context.ContextEnricherFactory; +import be.ugent.idlab.divide.core.context.IContextEnricher; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.CountDownLatch; + +/** + * Runnable to be used when the {@link IContextEnricher} associated to a given + * DIVIDE {@link IComponent} and {@link IDivideQuery} should be updated such that + * a {@link IContextEnricher} is created if it did not exist yet, and that the + * given {@link DivideOntology} is registered on this context enricher. + * + * This can be parallelized with doing the same task for the other DIVIDE queries + * registered at the same component (if this task is also required for these queries). 
+ */ +class SingleContextEnricherUpdater implements Runnable { + + private static final Logger LOGGER = + LoggerFactory.getLogger(SingleContextEnricherUpdater.class.getName()); + + private final IComponent component; + private final IDivideQuery divideQuery; + private final DivideOntology divideOntology; + private final CountDownLatch latch; + + /** + * Creates a runnable of which the {@link #run()} method should ensure that + * a {@link IContextEnricher} is associated to the given {@link IComponent} and + * {@link IDivideQuery}, and that this context enricher has the given + * {@link DivideOntology} registered. + * This means that this runnable will create a context enricher if no context + * enricher is associated yet to the given pair of {@link IComponent} and + * {@link IDivideQuery}, and that the given {@link DivideOntology} is registered + * if no or another ontology is currently registered to this context enricher. + * + * When the task of this runnable is finished, i.e., at the end of the + * {@link #run()} method, the given {@link CountDownLatch} should be decremented. 
+ * + * @param component component of which this runnable should check the associated + * context enricher + * @param divideQuery DIVIDE query of which this runnable should check the + * associated context enricher + * @param divideOntology DIVIDE ontology that should be registered at the context + * enricher associated to the given component & DIVIDE query + * @param latch latch to be decremented when this runnable finishes its job + */ + SingleContextEnricherUpdater(IComponent component, + IDivideQuery divideQuery, + DivideOntology divideOntology, + CountDownLatch latch) { + this.component = component; + this.divideQuery = divideQuery; + this.divideOntology = divideOntology; + this.latch = latch; + } + + /** + * Creates a runnable of which the {@link #run()} method should ensure that + * a {@link IContextEnricher} is associated to the given {@link IComponent} and + * {@link IDivideQuery}, and that this context enricher has the given + * {@link DivideOntology} registered. + * This means that this runnable will create a context enricher if no context + * enricher is associated yet to the given pair of {@link IComponent} and + * {@link IDivideQuery}, and that the given {@link DivideOntology} is registered + * if no or another ontology is currently registered to this context enricher. 
+ * + * @param component component of which this runnable should check the associated + * context enricher + * @param divideQuery DIVIDE query of which this runnable should check the + * associated context enricher + * @param divideOntology DIVIDE ontology that should be registered at the context + * enricher associated to the given component & DIVIDE query + */ + SingleContextEnricherUpdater(IComponent component, + IDivideQuery divideQuery, + DivideOntology divideOntology) { + this.component = component; + this.divideQuery = divideQuery; + this.divideOntology = divideOntology; + this.latch = null; + } + + @Override + public void run() { + LOGGER.info("Updating context enricher for component with ID '{}' and DIVIDE " + + "query with name '{}', and DIVIDE ontology with ID '{}'", + component.getId(), divideQuery.getName(), divideOntology.getId()); + + // check if a context enricher is already registered for the given + // combination of DIVIDE component and DIVIDE query + IContextEnricher contextEnricher = component.getContextEnricher(divideQuery); + + // -> if not, action should be taken + if (contextEnricher == null) { + LOGGER.info("Creating context enricher for component with ID '{}' and DIVIDE " + + "query with name '{}' (none exists yet)", + component.getId(), divideQuery.getName()); + + // first create a new context enricher + contextEnricher = ContextEnricherFactory.createInstance( + divideQuery.getContextEnrichment(), + component.getId()); + + // register the context enricher for the DIVIDE query to the component + component.registerContextEnricher(divideQuery, contextEnricher); + } + + // register the ontology triples & rules to the context enricher + // -> if needed, an inference model can be built in parallel + contextEnricher.registerOntology(divideOntology); + + // if a latch is specified, count it down to let the calling thread + // know that this updating task has finished + if (latch != null) { + latch.countDown(); + } + } + +} diff --git 
a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleQueryDeriver.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleQueryDeriver.java new file mode 100644 index 0000000..db3fe55 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/engine/SingleQueryDeriver.java @@ -0,0 +1,124 @@ +package be.ugent.idlab.divide.core.engine; + +import be.ugent.idlab.divide.core.component.IComponent; +import be.ugent.idlab.divide.core.context.Context; +import be.ugent.idlab.divide.core.context.IContextEnricher; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.util.LogConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.List; +import java.util.concurrent.CountDownLatch; + +/** + * Runnable which has the task of performing the derivation of a single + * {@link IDivideQuery} on the context associated to a certain {@link IComponent}. + * + * This can be parallelized with doing the same task for the other DIVIDE queries + * registered at the same component (if this task is also required for these queries). 
+ */ +class SingleQueryDeriver implements Runnable { + + private static final Logger LOGGER = LoggerFactory.getLogger(SingleQueryDeriver.class.getName()); + + private final IDivideQuery divideQuery; + private final Context context; + private final IComponent component; + private final IDivideQueryDeriver divideQueryDeriver; + private final DivideOntology divideOntology; + private final CountDownLatch latch; + + SingleQueryDeriver(IDivideQuery divideQuery, + Context context, + IComponent component, + IDivideQueryDeriver divideQueryDeriver, + DivideOntology divideOntology, + CountDownLatch latch) { + this.divideQuery = divideQuery; + this.context = context; + this.component = component; + this.divideQueryDeriver = divideQueryDeriver; + this.divideOntology = divideOntology; + this.latch = latch; + } + + @Override + public void run() { + long start = System.currentTimeMillis(); + + LOGGER.info("Running DIVIDE query derivation for query '{}' (for component with ID '{}'," + + "and context '{}'))", + divideQuery.getName(), component.getId(), context.getId()); + + try { + // run context enricher updater runnable in this thread to ensure + // that a context enricher exists for the given combination of DIVIDE + // component and DIVIDE query, and that the given ontology is registered + // to the context enricher + SingleContextEnricherUpdater contextEnricherUpdater = + new SingleContextEnricherUpdater( + component, divideQuery, divideOntology); + contextEnricherUpdater.run(); + + // copy context for this DIVIDE query (to avoid overlap) + Context copiedContext = context.copy(); + LOGGER.info("Running DIVIDE query derivation for query '{}' (for component with ID '{}'): " + + "copy context '{}' to new context '{}'", + divideQuery.getName(), component.getId(), context.getId(), copiedContext.getId()); + + // then first enrich the context with the context enricher registered + // at the given DIVIDE component for the given DIVIDE query + IContextEnricher contextEnricher = 
component.getContextEnricher(divideQuery); + contextEnricher.enrichContext(copiedContext); + + // derive all query instances for the given DIVIDE query name and up-to-date context + // -> what about the exceptions? + // * DivideNotInitializedException is impossible: this is only called if an IRI + // for a specific component is updated, and components cannot be registered + // to the DIVIDE engine if it has not been initialized + // * DivideQueryDeriverException: is possible if issues occur in the EYE reasoning + // scripts; real EYE errors are unlikely since all input is valid by definition + // (is either query fields which are validated upon registration of a query, + // controlled static inputs of DIVIDE which are known to be valid, or outputs of + // previous reasoning steps); I/O errors can of course never be ruled out + // * other possible unchecked exceptions: always possible + // -> any exception should ALWAYS be caught and ignored, since otherwise the query + // update processing queue of this component will block FOREVER (since it is + // waiting for each started thread, including this one, to count down the latch) + // => whatever the exception is, this thread should simply stop and count down + // the latch, without having scheduled any queries for registration at the + // RSP engine handler + IDivideQueryDeriverResult divideQueryDeriverResult = divideQueryDeriver.deriveQueries( + divideQuery.getName(), copiedContext, component.getId()); + List substitutedQueries = divideQueryDeriverResult.getSubstitutedRspQlQueries(); + + // schedule each new query for registration + for (String query : substitutedQueries) { + component.getRspEngineHandler().scheduleForRegistration(query, divideQuery); + } + + LOGGER.info("Finished DIVIDE query derivation for query '{}' in {} milliseconds" + + " (for component with ID '{}', and context '{}')", + divideQuery.getName(), System.currentTimeMillis() - start, + component.getId(), copiedContext.getId()); + + } catch 
(Exception e) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Error during the DIVIDE query derivation for query '{}' " + + "(for component with ID '{}', and context '{}')", + divideQuery.getName(), component.getId(), context.getId(), e); + // TODO: 29/01/2021 do something with fact that not all required queries + // might be running? (I think an important part of the monitor will be monitoring + // for errors and send these errors on the monitoring stream so that depending on + // the use case, action can be taken when such an event occurs) + + } finally { + // whatever happens along the way, count down latch at the end so + // the main query derivation thread (in DivideEngine class) is not + // blocked forever + latch.countDown(); + } + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideException.java new file mode 100644 index 0000000..5f89287 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideException.java @@ -0,0 +1,20 @@ +package be.ugent.idlab.divide.core.exception; + +/** + * General exception describing known DIVIDE errors. 
+ */ +public abstract class DivideException extends Exception { + + public DivideException(String description, Exception base) { + super(description, base); + } + + public DivideException(String description) { + super(description); + } + + public DivideException(Exception base) { + super(base); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInitializationException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInitializationException.java new file mode 100644 index 0000000..9201f25 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInitializationException.java @@ -0,0 +1,19 @@ +package be.ugent.idlab.divide.core.exception; + +/** + * Exception thrown when an error occurs during the initialization of a + * DIVIDE object, which causes the object to not be correctly initialized, + * and therefore prevents this object from functioning as it should. + */ +@SuppressWarnings("unused") +public class DivideInitializationException extends DivideException { + + public DivideInitializationException(String description, Exception base) { + super(description, base); + } + + public DivideInitializationException(String description) { + super(description); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInvalidInputException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInvalidInputException.java new file mode 100644 index 0000000..92cb5b2 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideInvalidInputException.java @@ -0,0 +1,18 @@ +package be.ugent.idlab.divide.core.exception; + +/** + * Exception thrown when an error occurs because the input provided to a DIVIDE + * object (from the outside) is invalid. 
+ */ +@SuppressWarnings("unused") +public class DivideInvalidInputException extends DivideException { + + public DivideInvalidInputException(String description, Exception base) { + super(description, base); + } + + public DivideInvalidInputException(String description) { + super(description); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideNotInitializedException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideNotInitializedException.java new file mode 100644 index 0000000..0b3ceb4 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideNotInitializedException.java @@ -0,0 +1,27 @@ +package be.ugent.idlab.divide.core.exception; + +/** + * Exception thrown when a method of a DIVIDE object is called when it has not + * been initialized, and this object should be initialized first before this + * method can be called. + */ +@SuppressWarnings("unused") +public class DivideNotInitializedException extends DivideException { + + public DivideNotInitializedException(String description, Exception base) { + super(description, base); + } + + public DivideNotInitializedException(String description) { + super(description); + } + + public DivideNotInitializedException(Exception base) { + super("DIVIDE engine has not been initialized", base); + } + + public DivideNotInitializedException() { + super("DIVIDE engine has not been initialized"); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideQueryDeriverException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideQueryDeriverException.java new file mode 100644 index 0000000..ad212df --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/exception/DivideQueryDeriverException.java @@ -0,0 +1,24 @@ +package 
be.ugent.idlab.divide.core.exception; + +/** + * Exception thrown when an error occurs related to the query derivation process + * of DIVIDE. This can be during the query derivation, but also during the + * registration of DIVIDE queries in preparation of the query derivation for + * these DIVIDE queries. + */ +@SuppressWarnings("unused") +public class DivideQueryDeriverException extends DivideException { + + public DivideQueryDeriverException(String description, Exception base) { + super(description, base); + } + + public DivideQueryDeriverException(String description) { + super(description); + } + + public DivideQueryDeriverException(Exception base) { + super(base); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQuery.java new file mode 100644 index 0000000..b6044a2 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQuery.java @@ -0,0 +1,74 @@ +package be.ugent.idlab.divide.core.query; + +import be.ugent.idlab.divide.core.context.ContextEnrichment; + +import java.util.Objects; + +class DivideQuery implements IDivideQuery { + + private final String name; + private final String queryPattern; + private final String sensorQueryRule; + private final String goal; + private ContextEnrichment contextEnrichment; + + DivideQuery(String name, + String queryPattern, + String sensorQueryRule, + String goal, + ContextEnrichment contextEnrichment) { + this.name = name; + this.queryPattern = queryPattern; + this.sensorQueryRule = sensorQueryRule; + this.goal = goal; + this.contextEnrichment = contextEnrichment; + } + + @Override + public String getName() { + return name; + } + + @Override + public String getQueryPattern() { + return queryPattern; + } + + @Override + public String getSensorQueryRule() { + return sensorQueryRule; + } + + @Override + public String 
getGoal() { + return goal; + } + + @Override + public ContextEnrichment getContextEnrichment() { + return contextEnrichment; + } + + @Override + public void removeContextEnrichment() { + this.contextEnrichment = new ContextEnrichment(); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + DivideQuery that = (DivideQuery) o; + return name.equals(that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQueryFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQueryFactory.java new file mode 100644 index 0000000..8554396 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/DivideQueryFactory.java @@ -0,0 +1,27 @@ +package be.ugent.idlab.divide.core.query; + +import be.ugent.idlab.divide.core.context.ContextEnrichment; + +public class DivideQueryFactory { + + /** + * Create a new DIVIDE query with the given parameters. 
+ * + * @param queryName name of the DIVIDE query + * @param queryPattern generic query pattern used during query derivation + * @param sensorQueryRule sensor query rule used during query derivation + * @param goal goal used during query derivation + * @param contextEnrichment the context enrichment to be used at the start + * of the query derivation + * @return created DIVIDE query + */ + public static IDivideQuery createInstance(String queryName, + String queryPattern, + String sensorQueryRule, + String goal, + ContextEnrichment contextEnrichment) { + return new DivideQuery( + queryName, queryPattern, sensorQueryRule, goal, contextEnrichment); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/IDivideQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/IDivideQuery.java new file mode 100644 index 0000000..f666cdb --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/IDivideQuery.java @@ -0,0 +1,25 @@ +package be.ugent.idlab.divide.core.query; + +import be.ugent.idlab.divide.core.context.ContextEnrichment; + +/** + * Representation of a generic query within DIVIDE, of which specific initialized + * query instances can be derived. + * It has a name, a query pattern (which needs to be substituted), + * a sensor query rule used for the query derivation, and a goal used for the query derivation. 
+ */ +public interface IDivideQuery { + + String getName(); + + String getQueryPattern(); + + String getSensorQueryRule(); + + String getGoal(); + + ContextEnrichment getContextEnrichment(); + + void removeContextEnrichment(); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/CleanDivideQueryParserInput.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/CleanDivideQueryParserInput.java new file mode 100644 index 0000000..b612ce4 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/CleanDivideQueryParserInput.java @@ -0,0 +1,71 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class CleanDivideQueryParserInput extends DivideQueryParserInput { + + private final Map variableMapping; + private final Map reverseVariableMapping; + private Set unboundVariables; + private Map finalQueryVariableMapping; + + CleanDivideQueryParserInput(InputQueryLanguage inputQueryLanguage, + List streamWindows, + String streamQuery, + List intermediateQueries, + String finalQuery, + String solutionModifier, + Map variableMapping) { + super(inputQueryLanguage, streamWindows, streamQuery, intermediateQueries, + finalQuery, solutionModifier, null); + + this.variableMapping = variableMapping; + this.reverseVariableMapping = new HashMap<>(); + this.variableMapping.forEach((k, v) -> this.reverseVariableMapping.put(v, k)); + + this.finalQueryVariableMapping = new HashMap<>(); + } + + CleanDivideQueryParserInput(DivideQueryParserInput input) { + super(input.getInputQueryLanguage(), + input.getStreamWindows(), + input.getStreamQuery(), + input.getIntermediateQueries(), + input.getFinalQuery(), + input.getSolutionModifier(), + input.getStreamToFinalQueryVariableMapping()); + + this.variableMapping = new HashMap<>(); + 
this.reverseVariableMapping = new HashMap<>(); + + this.finalQueryVariableMapping = new HashMap<>(); + } + + Map getVariableMapping() { + return variableMapping; + } + + public Map getReverseVariableMapping() { + return reverseVariableMapping; + } + + void setUnboundVariables(Set unboundVariables) { + this.unboundVariables = unboundVariables; + } + + Set getUnboundVariables() { + return unboundVariables; + } + + public void setFinalQueryVariableMapping(Map finalQueryVariableMapping) { + this.finalQueryVariableMapping = finalQueryVariableMapping; + } + + public Map getFinalQueryVariableMapping() { + return finalQueryVariableMapping; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ConvertedStreamWindow.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ConvertedStreamWindow.java new file mode 100644 index 0000000..9ae9a3b --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ConvertedStreamWindow.java @@ -0,0 +1,31 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.List; + +public class ConvertedStreamWindow extends StreamWindow { + + private final List windowParameters; + + public ConvertedStreamWindow(String streamIri, + String windowDefinition, + List windowParameters) { + super(streamIri, windowDefinition); + + this.windowParameters = windowParameters; + } + + public List getWindowParameters() { + return windowParameters; + } + + @Override + public String toString() { + return "ConvertedStreamWindow{" + + "windowParameters=" + windowParameters + + ", streamIri='" + streamIri + '\'' + + ", windowDefinition='" + windowDefinition + '\'' + + ", defaultWindowParameterValues=" + defaultWindowParameterValues + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryGenerator.java 
// ---- File: be/ugent/idlab/divide/core/query/parser/DivideQueryGenerator.java ----
package be.ugent.idlab.divide.core.query.parser;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * Generates the DIVIDE query artifacts (query pattern, sensor query rule, goal,
 * RSP-QL query body) from parsed query input.
 *
 * <p>NOTE(review): every string literal below that reads {@code ""} where an IRI
 * is clearly expected (prefix namespaces, window parameter type IRIs) was lost
 * by the extraction that produced this chunk (angle-bracketed {@code <...>}
 * content was stripped); restore the real IRIs from the upstream source.
 * Generic type parameters were likewise stripped and are inferred from usage.
 */
class DivideQueryGenerator {

    // Shared counter used to generate unique :pattern-/:prefixes- IRIs; it is
    // incremented by DivideQueryParser after each parsed query.
    // NOTE(review): mutable non-private static state — not thread-safe; confirm
    // single-threaded use before parallelizing parsing.
    static long COUNTER = 1;

    private static final Set<Prefix> DIVIDE_PREFIXES = new HashSet<>();
    private static final Set<String> DIVIDE_PREFIX_NAMES;
    private static final Map<String, Prefix> DIVIDE_PREFIX_MAP;

    static {
        // TODO confirm: namespace IRIs stripped by extraction (were <...> literals)
        DIVIDE_PREFIXES.add(new Prefix(":", ""));
        DIVIDE_PREFIXES.add(new Prefix("sd:", ""));
        DIVIDE_PREFIXES.add(new Prefix("sh:", ""));
        DIVIDE_PREFIXES.add(new Prefix("owl:", ""));
        DIVIDE_PREFIXES.add(new Prefix("rdf:", ""));
        DIVIDE_PREFIXES.add(new Prefix("xsd:", ""));

        DIVIDE_PREFIX_MAP = new HashMap<>();
        for (Prefix dividePrefix : DIVIDE_PREFIXES) {
            DIVIDE_PREFIX_MAP.put(dividePrefix.getName(), dividePrefix);
        }

        DIVIDE_PREFIX_NAMES = DIVIDE_PREFIX_MAP.keySet();
    }

    private static final String TURTLE_PREFIX_TEMPLATE = "@prefix %s %s .";

    private static final String SHACL_PREFIX_DECLARATION_TEMPLATE =
            ":prefixes-%d sh:declare [ sh:prefix \"%s\" ; sh:namespace \"%s\"^^xsd:anyURI ] .";

    private static final String QUERY_PATTERN_TEMPLATE =
            "%s\n" + ":prefixes-%d rdf:type owl:Ontology .\n%s\n" +
                    ":pattern rdf:type sd:QueryPattern ; " +
                    "sh:prefixes :prefixes-%d ; sh:%s \"\"\"%s\"\"\".";

    private static final String SENSOR_QUERY_RULE_TEMPLATE =
            "%s\n" +
                    "{\n%s\n}\n=>\n{\n" +
                    "_:q rdf:type sd:Query ;\n" +
                    " sd:pattern :pattern ;\n" +
                    " sd:inputVariables (%s) ;\n" +
                    " sd:windowParameters (%s) ;\n" +
                    " sd:outputVariables (%s) .\n" +
                    "\n%s\n} .";

    private static final String SENSOR_QUERY_RULE_ADDITIONAL_RULE_TEMPLATE =
            "{\n%s\n}\n=>\n{\n%s\n} .";

    private static final String SENSOR_QUERY_RULE_INPUT_OUTPUT_VARIABLE_TEMPLATE = "(\"%s\" %s)";

    private static final String SENSOR_QUERY_RULE_WINDOW_PARAMETER_TEMPLATE = "(\"%s\" %s %s)";

    // window parameter type -> type IRI used in the sensor query rule
    // TODO confirm: type IRIs stripped by extraction (were <...> literals)
    private static final Map<WindowParameter.WindowParameterType, String>
            windowParameterTypeMapping = new HashMap<>();
    static {
        windowParameterTypeMapping.put(WindowParameter.WindowParameterType.XSD_DURATION,
                "");
        windowParameterTypeMapping.put(WindowParameter.WindowParameterType.TIME_SECONDS,
                "");
        windowParameterTypeMapping.put(WindowParameter.WindowParameterType.TIME_MINUTES,
                "");
        windowParameterTypeMapping.put(WindowParameter.WindowParameterType.TIME_HOURS,
                "");
    }

    private static final String GOAL_TEMPLATE = "%s\n{\n%s\n}\n=>\n{\n%s\n} .";

    private static final String RSP_QL_QUERY_BODY_TEMPLATE = "%s\n%s\n%s\nWHERE {\n%s\n}\n%s";

    private static final String RSP_QL_QUERY_BODY_FROM_TEMPLATE =
            "FROM NAMED WINDOW :win%d ON %s [%s]";

    private static final String RSP_QL_QUERY_BODY_WHERE_GRAPH_TEMPLATE = "WINDOW :win%d {\n%s\n}";

    /**
     * @param queryForm query form of the RSP-QL query template for which
     *                  this pattern is created
     * @param prefixes set of prefixes used in the RSP-QL query body
     * @param rspQlQueryBody RSP-QL query body of the query template for which
     *                       this pattern is created; this should be the output
     *                       of the {@link #createRspQlQueryBody(QueryForm, String,
     *                       List, String, List, DivideQueryParser)} method
     *
     * @return query pattern of the DIVIDE query
     */
    String createQueryPattern(QueryForm queryForm,
                              Set<Prefix> prefixes,
                              String rspQlQueryBody) {
        Set<Prefix> dividePrefixes = new HashSet<>(DIVIDE_PREFIXES);

        // keep only prefixes that actually occur in the query body
        // (the negative lookahead excludes the :winN window names)
        Set<Prefix> prefixesPresent = new HashSet<>();
        for (Prefix prefix : prefixes) {
            if (Pattern.compile("(\\s|\\(|^|\\^)" + prefix.getName() + "(?!win[0-9]+\\s)")
                    .matcher(rspQlQueryBody).find()) {
                if (":".equals(prefix.getName())) {
                    // a prefix without a name cannot be defined in SHACL, so should
                    // be replaced with a DIVIDE prefix
                    Prefix newPrefix = new Prefix(
                            String.format("divide-%s:", UUID.randomUUID()),
                            prefix.getUri());

                    // update prefix set
                    prefixesPresent.add(newPrefix);

                    // update RSP-QL query body according to new prefix
                    Pattern replacingPattern = Pattern.compile("(\\s|\\(|^|\\^):(?!win[0-9]+\\s)");
                    Matcher m = replacingPattern.matcher(rspQlQueryBody);
                    rspQlQueryBody = m.replaceAll("$1" + newPrefix.getName());

                } else {
                    // only include in prefix set if prefix occurs in RSP-QL query body
                    prefixesPresent.add(prefix);
                }
            }
        }

        // update DIVIDE prefixes and template if prefix conflicts exist
        List<String> templates = new ArrayList<>();
        templates.add(QUERY_PATTERN_TEMPLATE);
        templates.add(SHACL_PREFIX_DECLARATION_TEMPLATE);
        templates = solveConflictsWithDividePrefixes(templates, prefixesPresent, dividePrefixes);

        return String.format(templates.get(0),
                getTurtlePrefixList(dividePrefixes),
                COUNTER,
                getShaclPrefixList(prefixesPresent, templates.get(1)),
                COUNTER,
                // FIX: use Locale.ROOT — default-locale lowercasing corrupts
                // keywords containing 'I' (e.g. DESCRIBE) under e.g. the Turkish locale
                queryForm.toString().toLowerCase(Locale.ROOT),
                rspQlQueryBody);
    }

    /**
     * @param prefixes set of prefixes used in the sensor query rule content
     *                 (both context part and stream query result, and in possible
     *                 additional queries)
     * @param contextPart context part of stream query which is used as antecedent
     *                    of the sensor query rule
     * @param streamQueryResult stream query result which is part of the consequence
     *                          of the sensor query rule
     * @param inputVariables input variables from the antecedent that need to be substituted
     *                       into the consequence (including pattern) and therefore need to
     *                       be defined as input variables in this sensor query rule
     * @param windowParameters window parameters to be declared in the sensor query rule
     * @param outputVariables output variables in the stream query result (i.e., variables
     *                        not occurring in the antecedent) that therefore need to be
     *                        substituted into blank nodes in the consequence of the created
     *                        sensor query rule
     * @param additionalQueries parsed additional SPARQL queries that are executed
     *                          between the first stream-dependent query and the final
     *                          query yielding the query result
     *
     * @return sensor query rule for the DIVIDE query, extended with an additional rule
     *         for each additional query (if existing)
     */
    String createSensorQueryRule(Set<Prefix> prefixes,
                                 String contextPart,
                                 String streamQueryResult,
                                 List<String> inputVariables,
                                 List<WindowParameter> windowParameters,
                                 List<String> outputVariables,
                                 List<ParsedSparqlQuery> additionalQueries) {
        Set<Prefix> dividePrefixes = new HashSet<>(DIVIDE_PREFIXES);

        // update DIVIDE prefixes and template if prefix conflicts exist
        List<String> templates = Collections.singletonList(SENSOR_QUERY_RULE_TEMPLATE);
        templates = solveConflictsWithDividePrefixes(templates, prefixes, dividePrefixes);

        // merge all prefixes
        // (merging can happen without any issues since the documentation
        //  mentions that this method expects no overlap between the prefix sets)
        Set<Prefix> allPrefixes = new HashSet<>(dividePrefixes);
        allPrefixes.addAll(prefixes);

        // generate string of input variables; sorted so that variables that are
        // substrings of other variables come after them (avoids partial matches
        // during later substitution)
        String inputVariablesString = inputVariables
                .stream()
                .sorted((s1, s2) -> s1.contains(s2) ?
                        (s1.equals(s2) ? 0 : -1) :
                        (s2.contains(s1) ? 1 : s1.compareTo(s2)))
                .map(s -> String.format(SENSOR_QUERY_RULE_INPUT_OUTPUT_VARIABLE_TEMPLATE, s, s))
                .collect(Collectors.joining(" "));

        // generate string of window variables; literal (non-substitution)
        // xsd:duration values are quoted
        String windowVariablesString = windowParameters
                .stream()
                .map(s -> String.format(SENSOR_QUERY_RULE_WINDOW_PARAMETER_TEMPLATE,
                        s.getVariable(),
                        !s.isValueSubstitutionVariable() &&
                                s.getType() == WindowParameter.WindowParameterType.XSD_DURATION ?
                                "\"" + s.getValue() + "\"" : s.getValue(),
                        windowParameterTypeMapping.get(s.getType())))
                .collect(Collectors.joining(" "));

        // process all output variables
        List<String> outputVariablesList = new ArrayList<>();
        for (String outputVariable : outputVariables) {
            // create blank node for each output variable
            String blank = outputVariable.replaceFirst(Pattern.quote("?"), "_:");

            // generate string to add to list output variables
            outputVariablesList.add(String.format(
                    SENSOR_QUERY_RULE_INPUT_OUTPUT_VARIABLE_TEMPLATE,
                    outputVariable, blank));

            // replace output variable by its blank node in the stream query result,
            // which ends up in the consequence of the sensor query rule
            streamQueryResult = streamQueryResult.replaceAll(
                    Pattern.quote(outputVariable), blank);
        }
        String outputVariablesString = String.join(" ", outputVariablesList);

        // create sensor query rule string
        String sensorQueryRule = String.format(templates.get(0),
                getTurtlePrefixList(allPrefixes),
                contextPart,
                inputVariablesString,
                windowVariablesString,
                outputVariablesString,
                streamQueryResult);

        // create additional rule string for each additional query
        // (WHERE clause as antecedent, CONSTRUCT clause as consequence)
        List<String> additionalRules = new ArrayList<>();
        for (ParsedSparqlQuery additionalQuery : additionalQueries) {
            additionalRules.add(String.format(SENSOR_QUERY_RULE_ADDITIONAL_RULE_TEMPLATE,
                    additionalQuery.getSplitSparqlQuery().getWherePart(),
                    additionalQuery.getSplitSparqlQuery().getResultPart()));
        }

        // FIX: only append the additional rules when there are any; previously a
        // trailing "\n\n" was always appended even for an empty list
        if (additionalRules.isEmpty()) {
            return sensorQueryRule;
        }
        return String.format("%s\n\n%s",
                sensorQueryRule,
                String.join("\n\n", additionalRules));
    }

    /**
     * @param prefixes set of prefixes used in the goal's antecedent and consequence
     * @param antecedent antecedent of the rule that makes up the goal
     * @param consequence consequence of the rule that makes up the goal
     *
     * @return goal for the DIVIDE query
     */
    String createGoal(Set<Prefix> prefixes,
                      String antecedent,
                      String consequence) {
        String prefixString = String.join(" ", getTurtlePrefixList(prefixes));
        return String.format(GOAL_TEMPLATE,
                prefixString,
                antecedent,
                consequence);
    }

    /**
     * @param queryForm form of the last query in the chain of input queries, that also
     *                  needs to be used as form in the RSP-QL query body for the DIVIDE
     *                  query (can either be CONSTRUCT, SELECT or ASK)
     * @param queryOutput output of the last query in the chain of input queries, that
     *                    also needs to be the output of the RSP-QL query body for the
     *                    DIVIDE query
     * @param whereClauseItems ordered list of WHERE clause items that are either graphs
     *                         clauses on a stream IRI or SPARQL expressions; this list
     *                         will be processed to generate the WHERE clause for the
     *                         created RSP-QL query body; this list should contain at
     *                         least 1 graph clause on a stream IRI
     * @param solutionModifier solution modifier of the resulting RSP-QL query as defined
     *                         in the input
     * @param streamWindows stream windows defined in the parser input, which should contain
     *                      an entry for each stream IRI specified in the graph WHERE clause
     *                      items (together with the window parameters for this stream IRI)
     * @param parser parser used to find unbound variables in the generated body
     *
     * @return the RSP-QL query body to be used in the query pattern of the DIVIDE query
     *
     * @throws InvalidDivideQueryParserInputException if the stream windows list does not contain
     *                                                a stream window with a graph IRI that appears
     *                                                in the where clause graph items that make up
     *                                                the RSP-QL query body
     */
    RspQlQueryBody createRspQlQueryBody(QueryForm queryForm,
                                        String queryOutput,
                                        List<WhereClauseItem> whereClauseItems,
                                        String solutionModifier,
                                        List<StreamWindow> streamWindows,
                                        DivideQueryParser parser)
            throws InvalidDivideQueryParserInputException {
        // create set of distinct stream graph names (IRIs) in the set of
        // WHERE clause items
        Set<String> inputStreamGraphs = new HashSet<>();
        for (WhereClauseItem whereClauseItem : whereClauseItems) {
            if (whereClauseItem.getItemType() == WhereClauseItemType.GRAPH) {
                WhereClauseGraphItem graphItem =
                        (WhereClauseGraphItem) whereClauseItem;
                inputStreamGraphs.add(graphItem.getGraph().getName());
            }
        }

        // keep track of to which window number the different stream graph
        // names are mapped
        Map<String, Integer> streamGraphToWindowNumberMap = new HashMap<>();

        // create FROM clauses
        List<String> fromParts = new ArrayList<>();
        int windowCounter = 0;
        for (String inputStreamGraph : inputStreamGraphs) {
            // filter list of input stream windows with the window that
            // has the same IRI (name)
            Optional<StreamWindow> matchingWindow = streamWindows
                    .stream()
                    .filter(streamWindow -> streamWindow.getStreamIri().equals(inputStreamGraph))
                    .findFirst();

            // if such a window is not present, an exception should be thrown,
            // because then there is no input about the window parameters for
            // this IRI
            if (!matchingWindow.isPresent()) {
                throw new InvalidDivideQueryParserInputException(
                        String.format("Window parameters of input stream '%s' are not " +
                                "specified in input", inputStreamGraph));
            }

            // otherwise, the FROM clause of this window can be generated and the
            // window number is saved to the map
            // -> first, the unbound variables still need to be replaced in window
            String windowDefinition = matchingWindow.get().getWindowDefinition();
            fromParts.add(String.format(RSP_QL_QUERY_BODY_FROM_TEMPLATE,
                    windowCounter,
                    inputStreamGraph,
                    windowDefinition));
            streamGraphToWindowNumberMap.put(inputStreamGraph, windowCounter++);
        }
        String fromPart = String.join("\n", fromParts);

        // construct WHERE clause
        StringBuilder whereClause = new StringBuilder();
        if (inputStreamGraphs.size() == 1) {
            // if there is only 1 input stream graph, all expressions in the WHERE
            // clause items can be grouped under that same graph
            String graphName = "";
            for (WhereClauseItem whereClauseItem : whereClauseItems) {
                if (whereClauseItem.getItemType() == WhereClauseItemType.EXPRESSION) {
                    WhereClauseExpressionItem expressionItem =
                            (WhereClauseExpressionItem) whereClauseItem;
                    whereClause.append(expressionItem.getExpression()).append(" ");

                } else if (whereClauseItem.getItemType() == WhereClauseItemType.GRAPH) {
                    WhereClauseGraphItem graphItem =
                            (WhereClauseGraphItem) whereClauseItem;
                    whereClause.append(graphItem.getGraph().getClause()).append(" ");
                    graphName = graphItem.getGraph().getName();

                }
            }

            // this means 1 graph pattern is created on the window with the correct number
            // -> this makes up the whole WHERE clause of the query
            whereClause = new StringBuilder(String.format(RSP_QL_QUERY_BODY_WHERE_GRAPH_TEMPLATE,
                    streamGraphToWindowNumberMap.get(graphName),
                    whereClause));

        } else {
            // if there is more than 1 input stream graph, all expressions in the WHERE
            // clause items (that were not grouped under a graph, i.e., of item type EXPRESSION)
            // are also appended to the WHERE clause in the same way (i.e., not under a graph)
            List<String> whereClauseParts = new ArrayList<>();
            for (WhereClauseItem whereClauseItem : whereClauseItems) {
                if (whereClauseItem.getItemType() == WhereClauseItemType.EXPRESSION) {
                    WhereClauseExpressionItem expressionItem =
                            (WhereClauseExpressionItem) whereClauseItem;
                    // so expression items are just added as such to the WHERE clause
                    whereClauseParts.add(expressionItem.getExpression());

                } else if (whereClauseItem.getItemType() == WhereClauseItemType.GRAPH) {
                    WhereClauseGraphItem graphItem =
                            (WhereClauseGraphItem) whereClauseItem;
                    // graph items are added as a graph pattern on the window with the correct number
                    whereClauseParts.add(String.format(RSP_QL_QUERY_BODY_WHERE_GRAPH_TEMPLATE,
                            streamGraphToWindowNumberMap.get(graphItem.getGraph().getName()),
                            graphItem.getGraph().getClause()));

                }
            }
            // in this case, the WHERE clause consists of the ordered string of all
            // created individual parts
            whereClause = new StringBuilder(String.join("\n", whereClauseParts));
        }

        // generate query body string
        String queryFormString = queryForm == QueryForm.CONSTRUCT
                ? String.format("{ %s }", queryOutput)
                : (queryForm == QueryForm.ASK ? "" : queryOutput);
        String queryBody = String.format(RSP_QL_QUERY_BODY_TEMPLATE,
                queryForm.toString(),
                queryFormString,
                fromPart,
                whereClause,
                solutionModifier);

        // collect all unbound variables in RSP-QL query body
        // -> ignore from part when doing general search
        // -> ignore unbound variables in stream windows since they will be used
        //    as window parameters instead of input variables
        Set<String> unboundVariables = new HashSet<>(
                parser.findUnboundVariables(String.format(RSP_QL_QUERY_BODY_TEMPLATE,
                        queryForm,
                        queryFormString,
                        "",
                        whereClause,
                        solutionModifier)));

        return new RspQlQueryBody(queryBody, unboundVariables, queryForm,
                queryFormString, whereClause.toString());
    }

    /**
     * Solves any conflicts with the set of used prefixes in the given template
     * and with the given set of DIVIDE prefixes.
     *
     * @param templates templates to be checked
     * @param usedPrefixes set of prefixes that is used, with which no conflicts
     *                     may occur
     * @param dividePrefixes set of prefixes that will be used for the DIVIDE IRIs
     *                       in the given template; this set will be modified if
     *                       any prefix conflicts occur (conflicting prefixes are
     *                       then replaced by the new unambiguous ones)
     * @return modified templates where any prefix conflicts are resolved, i.e.,
     *         where conflicting prefixes are replaced by an unambiguous new one
     */
    private List<String> solveConflictsWithDividePrefixes(List<String> templates,
                                                          Set<Prefix> usedPrefixes,
                                                          Set<Prefix> dividePrefixes) {
        for (Prefix prefix : usedPrefixes) {
            if (DIVIDE_PREFIX_NAMES.contains(prefix.getName())) {
                // retrieve prefix
                Prefix conflictingPrefix = DIVIDE_PREFIX_MAP.get(prefix.getName());

                // it is only a real conflict if the URI differs
                if (!prefix.getUri().equals(conflictingPrefix.getUri())) {
                    // create new prefix
                    Prefix newPrefix = new Prefix(
                            String.format("divide-%s:", UUID.randomUUID()),
                            conflictingPrefix.getUri());

                    // update prefix set
                    dividePrefixes.remove(conflictingPrefix);
                    dividePrefixes.add(newPrefix);

                    // update prefix template
                    List<String> newTemplates = new ArrayList<>();
                    for (String template : templates) {
                        Pattern replacingPattern =
                                Pattern.compile("(\\s|\\(|^|\\^)" + conflictingPrefix.getName());
                        Matcher m = replacingPattern.matcher(template);
                        template = m.replaceAll("$1" + newPrefix.getName());
                        newTemplates.add(template);
                    }
                    templates = new ArrayList<>(newTemplates);
                }
            }
        }
        return templates;
    }

    /** Joins the Turtle @prefix declarations for the given prefixes with spaces. */
    String getTurtlePrefixList(Set<Prefix> prefixes) {
        List<String> turtlePrefixList = new ArrayList<>();
        for (Prefix prefix : prefixes) {
            turtlePrefixList.add(convertPrefixToTurtlePrefix(prefix));
        }
        return String.join(" ", turtlePrefixList);
    }

    /** Joins the SHACL prefix declarations for the given prefixes with newlines. */
    private String getShaclPrefixList(Set<Prefix> prefixes, String template) {
        List<String> shaclPrefixList = new ArrayList<>();
        for (Prefix prefix : prefixes) {
            shaclPrefixList.add(convertPrefixToShaclPrefix(template, prefix));
        }
        return String.join("\n", shaclPrefixList);
    }

    private String convertPrefixToTurtlePrefix(Prefix prefix) {
        return String.format(TURTLE_PREFIX_TEMPLATE, prefix.getName(), prefix.getUri());
    }

    private String convertPrefixToShaclPrefix(String template, Prefix prefix) {
        // strip the trailing ':' from the prefix name and the surrounding
        // angle brackets from the IRI (hence substring(1, len-1))
        return String.format(template,
                COUNTER,
                prefix.getName().substring(0, prefix.getName().length() - 1),
                prefix.getUri().substring(1, prefix.getUri().length() - 1));
    }

}
+import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +public class DivideQueryParser implements IDivideQueryParser { + + private static final boolean DEBUG = false; + + private static int PREFIX_COUNTER = 0; + + private static final Pattern PREFIX_PATTERN = Pattern.compile( + "(\\s*PREFIX\\s+(\\S+)\\s+(<[^<>]+>))", Pattern.CASE_INSENSITIVE); + + private static final Pattern SPARQL_FROM_NAMED_GRAPH_PATTERN = Pattern.compile( + "\\s*FROM\\s+NAMED\\s+(\\S+)", Pattern.CASE_INSENSITIVE); + + private static final Pattern SPARQL_FROM_DEFAULT_GRAPH_PATTERN = Pattern.compile( + "\\s*FROM\\s+(\\S+)", Pattern.CASE_INSENSITIVE); + + private static final Pattern RSP_QL_FROM_NAMED_GRAPH_PATTERN = Pattern.compile( + "\\s*FROM\\s+NAMED\\s+GRAPH\\s+(\\S+)", Pattern.CASE_INSENSITIVE); + + private static final Pattern RSP_QL_FROM_DEFAULT_GRAPH_PATTERN = Pattern.compile( + "\\s*FROM\\s+GRAPH\\s+(\\S+)", Pattern.CASE_INSENSITIVE); + + private static final Pattern RSP_QL_FROM_NAMED_WINDOW_PATTERN = Pattern.compile( + "\\s*FROM\\s+NAMED\\s+WINDOW\\s+(\\S+)\\s+ON\\s+(\\S+)\\s+\\[([^\\[\\]]+)]", + Pattern.CASE_INSENSITIVE); + + private static final Pattern RSP_QL_WINDOW_PARAMETERS_PATTERN = Pattern.compile( + "\\s*((RANGE\\s+(\\S+))|(FROM\\s+NOW-(\\S+)\\s+TO\\s+NOW-(\\S+)))\\s+(TUMBLING|(STEP\\s+(\\S+)))", + Pattern.CASE_INSENSITIVE); + + private static final Pattern SPARQL_WHERE_CLAUSE_GRAPH_PATTERN = Pattern.compile( + "\\s*(GRAPH)\\s+(\\S+)\\s+\\{", Pattern.CASE_INSENSITIVE); + + private static final Pattern RSP_QL_WHERE_CLAUSE_GRAPH_OR_WINDOW_PATTERN = Pattern.compile( + "\\s*(WINDOW|GRAPH)\\s+(\\S+)\\s+\\{", Pattern.CASE_INSENSITIVE); + + private static final Pattern SPARQL_QUERY_SPLIT_PATTERN = Pattern.compile( + "(" + PREFIX_PATTERN.pattern() + "*)" + // prefix group 1 + 
".+(CONSTRUCT|SELECT|ASK|DESCRIBE)((.(?!FROM))*)" + // form group 3, result group 4 + "(\\s*(FROM.+)*)" + // from clauses group 8 + "(WHERE\\s*\\{(.+)})" + // where clause group 11 + "([^{}]*)", // remainder group 12 + Pattern.CASE_INSENSITIVE); + + private static final Pattern SPECIAL_SPARQL_PATTERN = + Pattern.compile("(OPTIONAL|UNION|GRAPH|BIND|GROUP BY|HAVING|MINUS|FILTER)" + + "(.(?!(OPTIONAL|UNION|GRAPH|BIND|GROUP BY|HAVING|MINUS|FILTER)))+", + Pattern.CASE_INSENSITIVE); + + private static final Pattern GROUP_BY_PATTERN = + Pattern.compile("GROUP\\s+BY\\s+(.(?!ORDER|LIMIT|OFFSET))+", Pattern.CASE_INSENSITIVE); + + private static final Pattern PN_CHARS_BASE_PATTERN = + Pattern.compile("([A-Z]|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|" + + "[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|[\u2C00-\u2FEF]|" + + "[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|[\\x{10000}-\\x{EFFFF}])"); + private static final Pattern PN_CHARS_U_PATTERN = + Pattern.compile(String.format("(%s)|_", PN_CHARS_BASE_PATTERN)); + private static final Pattern PN_CHARS_PATTERN = + Pattern.compile(String.format( + "(%s)|-|[0-9]|\u00B7|[\u0300-\u036F]|[\u203F-\u2040]", PN_CHARS_U_PATTERN)); + private static final Pattern PN_PREFIX_PATTERN = + Pattern.compile(String.format("(%s)(((%s)|'.')*(%s))?", + PN_CHARS_BASE_PATTERN, PN_CHARS_PATTERN, PN_CHARS_PATTERN)); + private static final Pattern PN_NAME_NS_PATTERN = + Pattern.compile(String.format("(\\s|\\(|^|\\^)((%s)?:)", PN_PREFIX_PATTERN)); + private static final Pattern VARNAME_PATTERN = + Pattern.compile(String.format( + "((%s)|[0-9])((%s)|[0-9]|\u00B7|[\u0300-\u036F]|[\u203F-\u2040])*", + PN_CHARS_U_PATTERN, PN_CHARS_U_PATTERN)); + private static final Pattern VAR1_PATTERN = + Pattern.compile(String.format("\\?(%s)", VARNAME_PATTERN)); + + private static final Pattern USED_PREFIX_PATTERN = PN_NAME_NS_PATTERN; + private static final Pattern UNBOUND_VARIABLES_PATTERN = VAR1_PATTERN; + private static final Pattern 
UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN = + Pattern.compile(String.format("\\?\\{(%s)}", VARNAME_PATTERN)); + + private static final Pattern STREAM_WINDOW_PARAMETER_VARIABLE_PATTERN = + Pattern.compile(String.format("((%s)|(PT(%s)([SMH])))", + UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN, + UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN)); + private static final Pattern STREAM_WINDOW_PARAMETER_NUMBER_PATTERN = + Pattern.compile("(PT([0-9]+)([SMH]))"); + + private static final Pattern SELECT_CLAUSE_EXPRESSION_PATTERN = + Pattern.compile(String.format("\\(\\s*(\\S+)\\s+AS\\s+(%s)\\s*\\)", VAR1_PATTERN)); + private static final Pattern SELECT_CLAUSE_PATTERN_ENTRY = + Pattern.compile(String.format("((%s)|(%s))\\s+", + SELECT_CLAUSE_EXPRESSION_PATTERN, VAR1_PATTERN)); + private static final Pattern SELECT_CLAUSE_PATTERN = + Pattern.compile(String.format("(%s)+", SELECT_CLAUSE_PATTERN_ENTRY)); + + private static final List POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS = new ArrayList<>(); + + static { + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("optional"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("union"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("graph"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("bind"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("group by"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("having"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("minus"); + POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS.add("filter"); + } + + private final DivideQueryGenerator divideQueryGenerator; + private final boolean processUnmappedVariableMatches; + private final boolean validateUnboundVariablesInRspQlQueryBody; + + DivideQueryParser(boolean processUnmappedVariableMatches, + boolean validateUnboundVariablesInRspQlQueryBody) { + this.divideQueryGenerator = new DivideQueryGenerator(); + + this.processUnmappedVariableMatches = processUnmappedVariableMatches; + this.validateUnboundVariablesInRspQlQueryBody = validateUnboundVariablesInRspQlQueryBody; + + // initialize Jena + 
org.apache.jena.query.ARQ.init(); + } + + DivideQueryParser() { + this(true, true); + } + + @Override + public void validateDivideQueryContextEnrichment(ContextEnrichment contextEnrichment) + throws InvalidDivideQueryParserInputException { + // check all context-enriching queries + for (ContextEnrichingQuery query : contextEnrichment.getQueries()) { + // split query + SplitSparqlQuery splitSparqlQuery = splitSparqlQuery(" " + query.getQuery()); + + // ensure query is of CONSTRUCT form + if (splitSparqlQuery.getQueryForm() != QueryForm.CONSTRUCT) { + throw new InvalidDivideQueryParserInputException( + "Context-enriching query should be of CONSTRUCT form"); + } + + // ensure query does not contain any FROM clauses + if (splitSparqlQuery.getFromPart() != null && + !splitSparqlQuery.getFromPart().trim().isEmpty()) { + throw new InvalidDivideQueryParserInputException( + "Context-enriching query should not contain any FROM clauses"); + } + + // ensure query does not contain any final part (solution modifiers) + if (splitSparqlQuery.getFinalPart() != null && + !splitSparqlQuery.getFinalPart().trim().isEmpty()) { + throw new InvalidDivideQueryParserInputException( + "Context-enriching query should not contain any solution modifiers"); + } + + // ensure query is valid SPARQL + try (QueryExecution queryExecution = QueryExecutionFactory.create( + query.getQuery(), ModelFactory.createDefaultModel())) { + queryExecution.execConstruct(); + } catch (Exception e) { + throw new InvalidDivideQueryParserInputException( + "Context-enriching query should be valid SPARQL"); + } + } + } + + @Override + public DivideQueryParserOutput parseDivideQuery(DivideQueryParserInput input) + throws InvalidDivideQueryParserInputException { + // make sure the input is validated & preprocessed + // (because the remainder of the parsing assumes valid & preprocessed input) + input.validate(); + input.preprocess(); + + // process variable mapping between stream & final query if relevant + 
MappedDivideQueryParserInput mappedInput =
                processStreamToFinalQueryVariableMappings(input);

        // clean input: replace overlapping variables with new non-overlapping ones
        CleanDivideQueryParserInput cleanInput =
                cleanInputFromOverlappingVariables(mappedInput);

        DivideQueryParserOutput result;
        if (input.getInputQueryLanguage() == InputQueryLanguage.SPARQL) {
            result = parseDivideQueryFromSparqlQueries(cleanInput);
        } else if (input.getInputQueryLanguage() == InputQueryLanguage.RSP_QL) {
            result = parseDivideQueryFromRspQlQuery(cleanInput);
        } else {
            // should not be possible
            throw new InvalidDivideQueryParserInputException(
                    "Invalid input query language");
        }

        // process output again, based on variable mapping
        result = restoreOriginalVariablesInOutput(result, cleanInput.getVariableMapping());

        // increase the counter of the generator which is used to create unique
        // pattern and prefixes IRIs
        // NOTE(review): a static mutable counter — presumably this parser is only
        // used single-threaded; confirm before concurrent use
        DivideQueryGenerator.COUNTER++;
        return result;
    }

    /**
     * Processes the variable mapping between the stream query and the final query.
     *
     * Only relevant for SPARQL input with a non-empty final query; otherwise the
     * original input is returned unchanged. If relevant, the final query's variables
     * are renamed so that exactly the variables listed in the mapping (plus, if
     * configured, identically named unmapped variables) match the stream query's
     * variables, while all other name collisions are removed via freshly generated
     * variable names. Only the final query text is rewritten; the stream query,
     * stream windows, intermediate queries and solution modifier are left untouched
     * (see the reasoning in the comments below).
     *
     * @param input original parser input
     * @return input with the final query rewritten and the map of replacements applied
     * @throws InvalidDivideQueryParserInputException if the mapping is inconsistent
     *         with the queries (unknown variables, duplicate targets, or a mapping
     *         provided for a final ASK query)
     */
    private MappedDivideQueryParserInput processStreamToFinalQueryVariableMappings(
            DivideQueryParserInput input) throws InvalidDivideQueryParserInputException {
        // check if mappings should be analyzed: is the case for SPARQL query input where
        // a final query is present
        // NOTE: analyzing is also required with an empty mapping, to check all variable
        //       matches that are not defined in the mapping
        boolean mappingAnalysisRequired =
                input.getInputQueryLanguage() == InputQueryLanguage.SPARQL &&
                        input.getFinalQuery() != null && !input.getFinalQuery().trim().isEmpty();

        // if no mapping analysis is required, we can continue with the original input
        if (!mappingAnalysisRequired) {
            return new MappedDivideQueryParserInput(input);
        }

        // NOTE(review): print(...) is presumably a debug logging helper defined
        // elsewhere in this class
        print("PROCESSING STREAM TO FINAL QUERY VARIABLE MAPPINGS");

        // validate final query
        String finalQuery = input.getFinalQuery();
        validateSparqlQuery(finalQuery, "Final");

        // split final query to be used further on
        SplitSparqlQuery splitFinalQuery = splitSparqlQuery(finalQuery);

        // retrieve mapping
        // NOTE(review): generic type parameters stripped by diff transport here and
        // below (presumably Map of String to String) — restore from version control
        Map mapping = input.getStreamToFinalQueryVariableMapping();

        // further check mapping in case of ASK query
        // -> for ASK queries, the result part is empty, so there is no part of
        //    the final query that will end up in the RSP-QL query body
        // -> no mapping should be done
        if (splitFinalQuery.getQueryForm() == QueryForm.ASK) {
            // so in case the mapping is empty, we can continue with the original input
            // -> if not, this is an indication of wrong input
            if (mapping.isEmpty()) {
                return new MappedDivideQueryParserInput(input);
            } else {
                throw new InvalidDivideQueryParserInputException(
                        "No stream to final query variable mapping should be provided " +
                                "if the final query is an ASK query.");
            }
        }

        // IF THIS POINT IS REACHED, A VARIABLE MATCH & MAPPING CHECK SHOULD BE DONE
        // -> based on the mappings, the stream and final query should both be analyzed
        // -> if adaptations to variable names are required, only the final query will
        //    be updated
        // BUT: what about variables occurring in other input parts?
        // -> solution modifier: this is used in the final RSP-QL query, of which the
        //    WHERE clause is fully extracted from the stream query
        // -> stream windows: variables occurring in the stream windows should always be
        //    replaced as window parameter during the query derivation, so
        //    they should either occur in the stream part of the stream
        //    query, or they are just put there to allow replacement of the
        //    default window parameter value via context-enriching queries
        // -> intermediate queries: they are used separately as extra rules in addition
        //    to the sensor query rule, but not used in the sensor
        //    query rule so no matching is required of them
        // CONCLUSION: if no updates are made to the variables as how they occur in the
        //    stream query, then no updates are required to the variables occurring
        //    in the solution modifier, stream windows & intermediate queries
        // => to align all matches and remove identical variable names for non-matches,
        //    it suffices to only make updates to variable names in final query

        // extract all variables occurring in stream query and final query
        List streamQueryVariables = findUnboundVariables(input.getStreamQuery());
        List finalQueryVariables = findUnboundVariables(input.getFinalQuery());

        // check if all variable mappings are valid, i.e. whether all keys are variable
        // names in the stream query, and all values are variable names in final query
        if (!streamQueryVariables.containsAll(mapping.keySet())) {
            throw new InvalidDivideQueryParserInputException(
                    "Stream to final query variable mapping contains variable " +
                            "names that do not occur in stream query");
        }
        if (!finalQueryVariables.containsAll(mapping.values())) {
            throw new InvalidDivideQueryParserInputException(
                    "Stream to final query variable mapping contains variable " +
                            "names that do not occur in final query");
        }

        // check if mapping file contains no conflicts
        // (i.e., no two stream variables mapping to the same final query variable)
        Set mappingValues = new HashSet<>();
        for (String s : mapping.keySet()) {
            if (mappingValues.contains(mapping.get(s))) {
                throw new InvalidDivideQueryParserInputException(
                        String.format("Stream to final query variable mapping contains " +
                                "duplicate mapping to variable '%s'", mapping.get(s)));
            }
            mappingValues.add(mapping.get(s));
        }

        // create reverse mapping to know the required replacements from the point of
        // view of the final query
        Map reverseMapping = new HashMap<>();
        for (String s : mapping.keySet()) {
            reverseMapping.put(mapping.get(s), s);
        }

        // keep track of list of required variable replacements in final query
        Map requiredReplacements = new HashMap<>();

        // create set of all possible conflicting variables to ensure that
        // no conflicts are created with the newest variables
        // -> this set should of course contain all final query variables
        //    (both old and new)
        // -> but also all stream query variables, to avoid a potential replacement
        //    of a variable in the final query to an already existing variable in the
        //    stream query
        // (note that conflicts are very unlikely because new random variable names are
        //  obtained from a random UUID generator, but it is still better to be safe)
        Set conflictingVariables = new HashSet<>(finalQueryVariables);
        conflictingVariables.addAll(streamQueryVariables);

        // loop over all variables occurring in the final query
        // -> a replacement entry should be created for ALL variables
        //    (also the ones that should not be actually replaced: for them,
        //    a replacement to themselves should be created)
        for (String finalQueryVariable : finalQueryVariables) {
            if (reverseMapping.containsKey(finalQueryVariable)) {
                // if the variable has a defined mapping, the required replacement in
                // the final query is obvious
                requiredReplacements.put(
                        finalQueryVariable, reverseMapping.get(finalQueryVariable));
                conflictingVariables.add(reverseMapping.get(finalQueryVariable));
                print("Add defined mapping: " + finalQueryVariable +
                        " to " + reverseMapping.get(finalQueryVariable));

            } else if (streamQueryVariables.contains(finalQueryVariable) &&
                    (mapping.containsKey(finalQueryVariable) || !processUnmappedVariableMatches)) {
                // if the final query variable also occurs in the stream query, and there
                // is no specifically defined variable in the stream query to which this
                // matches, then it depends on 2 things to decide whether this variable
                // should be replaced:
                // 1. if the variable also occurs as key of the mapping, then it should be
                //    replace by a random new variable, because there will be another final
                //    variable that is replaced by this variable
                // 2. if not, then the variable does not occur in the mapping (not in the key set
                //    if condition 1 above is not fulfilled, and not in the value set since
                //    the reverse mapping's key set does not contain this variable)
                //    -> then it depends on how to handle unmapped matches: if unmapped variable
                //       matches should not be processed, this means that they cannot be considered
                //       as a match, even though their names happen to be identical
                //       -> then a replacement is also required
                //       (otherwise, they can be considered as a match, and this means they
                //        can be left unchanged)

                // -> the final query variable should be replaced to a new variable
                //    that is not occurring in the stream query, and that is also not
                //    yet occurring in the final query
                boolean variableAccepted = false;
                while (!variableAccepted) {
                    String triedNewVariable = generateRandomUnboundVariable();
                    // there may be no final query variable that equals this name or
                    // of which the new variable is a substring
                    variableAccepted = conflictingVariables
                            .stream()
                            .noneMatch(s -> s.equals(triedNewVariable) ||
                                    s.contains(triedNewVariable));
                    if (variableAccepted) {
                        requiredReplacements.put(
                                finalQueryVariable, triedNewVariable);
                        conflictingVariables.add(triedNewVariable);
                        print("Add additional mapping: " +
                                finalQueryVariable + " to " + triedNewVariable);
                    }
                }

            } else {
                // if it's a variable that is not occurring in the stream query, and also not
                // a variable that should be mapped, then it can be left as is
                // -> a replacement to itself should then be created
                requiredReplacements.put(finalQueryVariable, finalQueryVariable);
            }
        }

        // split replacement list in two to first do some temporal replacements
        // -> these replacements will be done first before doing the actual replacements
        // -> this is to avoid that conflicts occur with cross-referenced mappings, e.g.,
        //    where ?a is mapped to ?b and ?b is mapped to ?a
        // -> this works if the resulting variables after replacement are unique, i.e.,
        //    they do not occur as such in the list of variables or as a substring of any
        //    of these variables
        Map temporalReplacements = new HashMap<>();
        Map finalReplacements = new HashMap<>();
        for (Map.Entry requiredReplacement : requiredReplacements.entrySet()) {
            String temporalVariable = "";
            boolean variableAccepted = false;
            while (!variableAccepted) {
                String triedNewVariable = generateRandomUnboundVariable();
                // there may be no final query variable that equals this name or
                // of which the new variable is a substring
                variableAccepted = conflictingVariables
                        .stream()
                        .noneMatch(s -> s.equals(triedNewVariable) ||
                                s.contains(triedNewVariable));
                if (variableAccepted) {
                    temporalVariable = triedNewVariable;
                    conflictingVariables.add(triedNewVariable);
                }
            }

            // split up replacements
            temporalReplacements.put(
                    requiredReplacement.getKey(), temporalVariable);
            finalReplacements.put(
                    temporalVariable, requiredReplacement.getValue());
        }

        print("Temporal replacements: " + temporalReplacements);
        print("Final replacements: " + finalReplacements);

        // first do temporal replacements
        // (longer keys first, so that a key that is a substring of another key
        //  cannot clobber it)
        List sortedTemporalReplacementKeys = temporalReplacements.keySet()
                .stream()
                .sorted((s1, s2) -> s1.contains(s2) ?
                        (s1.equals(s2) ? 0 : -1) :
                        (s2.contains(s1) ? 1 : s1.compareTo(s2)))
                .collect(Collectors.toList());
        print("Order of temporal replacements: " + sortedTemporalReplacementKeys);
        for (String key : sortedTemporalReplacementKeys) {
            // NOTE(review): the replacement string is passed to replaceAll without
            // Matcher.quoteReplacement — safe only while generated variable names
            // contain no '$' or '\' characters; confirm generateRandomUnboundVariable()
            finalQuery = finalQuery.replaceAll(
                    Pattern.quote(key), temporalReplacements.get(key));
        }
        print("Final query after temporal replacements: " + finalQuery);

        // then also do final replacements
        List finalTemporalReplacementKeys = finalReplacements.keySet()
                .stream()
                .sorted((s1, s2) -> s1.contains(s2) ?
                        (s1.equals(s2) ? 0 : -1) :
                        (s2.contains(s1) ? 1 : s1.compareTo(s2)))
                .collect(Collectors.toList());
        print("Order of final replacements: " + finalTemporalReplacementKeys);
        for (String key : finalTemporalReplacementKeys) {
            finalQuery = finalQuery.replaceAll(
                    Pattern.quote(key), finalReplacements.get(key));
        }
        print("Final query after final replacements: " + finalQuery);
        print("======================================");

        return new MappedDivideQueryParserInput(
                input.getInputQueryLanguage(),
                input.getStreamWindows(),
                input.getStreamQuery(),
                input.getIntermediateQueries(),
                finalQuery,
                input.getSolutionModifier(),
                requiredReplacements);
    }

    /**
     * Parses a DIVIDE query from SPARQL input: a stream query, optional intermediate
     * queries and an optional final query. If no final query is present and the
     * stream query is not a CONSTRUCT query, the input is first rewritten into an
     * equivalent (stream CONSTRUCT + final) query pair and this method recurses once.
     *
     * @param input cleaned parser input with SPARQL queries
     * @return the parser output derived from the input queries
     * @throws InvalidDivideQueryParserInputException if any input query is invalid
     */
    private DivideQueryParserOutput parseDivideQueryFromSparqlQueries(CleanDivideQueryParserInput input)
            throws InvalidDivideQueryParserInputException {
        // validate stream query
        validateSparqlQuery(input.getStreamQuery(), "Stream");

        // parse stream query
        ParsedSparqlQuery parsedStreamQuery = parseSparqlQuery(input.getStreamQuery());

        // if final query of input is not present, and query form of stream query
        // is not CONSTRUCT, a new input should be constructed in order to properly
        // deal with this!
        if (input.getFinalQuery() == null &&
                parsedStreamQuery.getSplitSparqlQuery().getQueryForm() != QueryForm.CONSTRUCT) {
            String constructTemplate;
            String newStreamQuery;
            String newFinalQuery;

            if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.SELECT) {
                // in case of a SELECT query, all variables occurring in the
                // SELECT clause should be transformed to a CONSTRUCT template
                // -> first parse SELECT clause
                List selectVariables = parseSelectClause(
                        parsedStreamQuery.getSplitSparqlQuery().getResultPart());

                // only retain those that match the actual variable mapping, excluding
                // "(...
// AS ?...)" definitions -> only those should be mapped to CONSTRUCT template
                List actualSelectVariables = selectVariables
                        .stream()
                        .filter(s -> UNBOUND_VARIABLES_PATTERN.matcher(s).matches())
                        .collect(Collectors.toList());

                // create CONSTRUCT template with random triple for each variable
                // NOTE(review): the IRI parts of this format template (between angle
                // brackets) appear stripped by the diff transport — restore from VCS
                constructTemplate = actualSelectVariables
                        .stream()
                        .map(s -> String.format("%s " +
                                        " .",
                                s, UUID.randomUUID(), UUID.randomUUID()))
                        .collect(Collectors.joining(" "));

                // create updated final SELECT query based on CONSTRUCT template and original input
                newFinalQuery = String.format("%s SELECT %s WHERE { %s }",
                        parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
                        parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
                        constructTemplate).trim();

            } else if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.DESCRIBE) {
                // in case of a DESCRIBE query, all variables occurring in the
                // DESCRIBE clause should be transformed to a CONSTRUCT template
                // -> first parse DESCRIBE clause
                List describeVariables = new ArrayList<>();
                Matcher m = UNBOUND_VARIABLES_PATTERN.matcher(
                        parsedStreamQuery.getSplitSparqlQuery().getResultPart());
                while (m.find()) {
                    describeVariables.add(m.group());
                }

                // create CONSTRUCT template with random triple for each variable
                constructTemplate = describeVariables
                        .stream()
                        .map(s -> String.format("%s " +
                                        " .",
                                s, UUID.randomUUID(), UUID.randomUUID()))
                        .collect(Collectors.joining(" "));

                // create updated final DESCRIBE query based on CONSTRUCT template and original input
                newFinalQuery = String.format("%s DESCRIBE %s WHERE { %s }",
                        parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
                        parsedStreamQuery.getSplitSparqlQuery().getResultPart(),
                        constructTemplate).trim();

            } else { // QueryForm.ASK
                // in case of an ASK query, no variables occur in the result part
                // -> a random triple should be generated to link both queries
                constructTemplate = String.format(
                        " " +
                                " "+
                                " .",
                        UUID.randomUUID(), UUID.randomUUID(),
                        UUID.randomUUID());

                // create updated final ASK query based on CONSTRUCT template and original input
                newFinalQuery = String.format("%s ASK WHERE { %s }",
                        parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
                        constructTemplate).trim();
            }

            // create updated stream query based on CONSTRUCT template and original input
            newStreamQuery = String.format("%s\nCONSTRUCT\n{\n%s\n}\n%s\nWHERE {\n%s\n} %s",
                    parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(),
                    constructTemplate,
                    parsedStreamQuery.getSplitSparqlQuery().getFromPart(),
                    parsedStreamQuery.getSplitSparqlQuery().getWherePart(),
                    parsedStreamQuery.getSplitSparqlQuery().getFinalPart()).trim();

            // create new parser input based on new stream & final queries, and copy other entries
            CleanDivideQueryParserInput newInput = new CleanDivideQueryParserInput(
                    input.getInputQueryLanguage(),
                    input.getStreamWindows(),
                    newStreamQuery,
                    new ArrayList<>(),
                    newFinalQuery,
                    input.getSolutionModifier(),
                    input.getVariableMapping());
            newInput.setUnboundVariables(input.getUnboundVariables());
            newInput.setFinalQueryVariableMapping(input.getFinalQueryVariableMapping());
            newInput.preprocess();

            // perform the parsing again for this adapted input
            // (recursion terminates: the new stream query is a CONSTRUCT query)
            return parseDivideQueryFromSparqlQueries(newInput);
        }

        // check if stream query has no final part
        if (parsedStreamQuery.getSplitSparqlQuery().getFinalPart() != null &&
                !parsedStreamQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
            throw new InvalidDivideQueryParserInputException(
                    "Input queries cannot contain any solution modifiers, since this" +
                            " cannot be preserved by DIVIDE (because individual" +
                            " instantiated queries are generated). Any solution modifier" +
                            " for the queries derived by DIVIDE can be defined as a" +
                            " separate input entry.");
        }

        // validate stream window definitions
        for (StreamWindow streamWindow : input.getStreamWindows()) {
            Matcher m = RSP_QL_WINDOW_PARAMETERS_PATTERN.matcher(
                    streamWindow.getWindowDefinition());
            if (!m.matches()) {
                throw new InvalidDivideQueryParserInputException(
                        String.format("Stream window with name '%s' contains invalid" +
                                " RSP-QL window definition", streamWindow.getStreamIri()));
            }
        }

        // retrieve the graph names used in the FROM clauses of this SPARQL query
        // NOTE(review): generic type parameters stripped by diff transport here and
        // below (presumably Pair of List-of-String and String) — restore from VCS
        Pair, String> inputGraphNamesResult = retrieveGraphNamesFromSparqlFromPart(
                parsedStreamQuery.getSplitSparqlQuery().getFromPart(),
                parsedStreamQuery.getPrefixes());
        List inputGraphNames = inputGraphNamesResult.getLeft();

        // parse remainder of FROM clause: it can only contain default graph patterns
        String fromPartLeftover = inputGraphNamesResult.getRight();
        Matcher m = SPARQL_FROM_DEFAULT_GRAPH_PATTERN.matcher(fromPartLeftover);
        while (m.find()) {
            fromPartLeftover = fromPartLeftover.replace(m.group().trim(), "").trim();
        }
        if (!fromPartLeftover.trim().isEmpty()) {
            throw new InvalidDivideQueryParserInputException(
                    String.format("SPARQL query contains invalid part '%s'", fromPartLeftover));
        }

        // parse the WHERE clause based on the used prefixes & defined input graph names
        WhereClause streamQueryWhereClause = parseWhereClauseOfQuery(
                parsedStreamQuery.getSplitSparqlQuery().getWherePart(),
                parsedStreamQuery.getPrefixes(),
                inputGraphNames,
                InputQueryLanguage.SPARQL);

        // parse where clause of stream query
        // (splits the clause into stream items and a context part)
        ParsedStreamQueryWhereClause parsedStreamQueryWhereClause =
                parseStreamQueryWhereClauseOfQuery(
                        streamQueryWhereClause,
                        input.getStreamWindows()
                                .stream()
                                .map(StreamWindow::getStreamIri)
                                .collect(Collectors.toList()));

        // validate parsed where clause of stream query: there should be at least
        // 1 graph on a stream IRI (otherwise there is no point of constructing
        // RSP queries with DIVIDE)
        if (parsedStreamQueryWhereClause.getStreamItems()
                .stream()
                .noneMatch(whereClauseItem
                        -> whereClauseItem.getItemType() == WhereClauseItemType.GRAPH)) {
            throw new InvalidDivideQueryParserInputException(
                    "Stream query should at least contain 1 graph on stream IRI in WHERE clause");
        }

        // validate defined solution modifier as valid SPARQL
        // (by wrapping it into a synthetic SELECT query and letting Jena parse it)
        List solutionModifierVariables = new ArrayList<>();
        if (!input.getSolutionModifier().trim().isEmpty()) {
            solutionModifierVariables.addAll(
                    findUnboundVariables(input.getSolutionModifier()));
            try {
                List selectVariables = new ArrayList<>();
                List whereClauseVariables = new ArrayList<>();
                Matcher solutionModifierMatcher =
                        GROUP_BY_PATTERN.matcher(input.getSolutionModifier());
                if (solutionModifierMatcher.find()) {
                    // with GROUP BY, only the grouped variables may be projected
                    selectVariables.addAll(findUnboundVariables(solutionModifierMatcher.group()));
                    whereClauseVariables.addAll(solutionModifierVariables);
                } else {
                    if (solutionModifierVariables.isEmpty()) {
                        // ensure the synthetic query projects at least one variable
                        selectVariables.add("?x");
                    } else {
                        selectVariables.addAll(solutionModifierVariables);
                    }
                    whereClauseVariables.addAll(selectVariables);
                }
                String testQuery = String.format("SELECT %s WHERE { %s } %s",
                        String.join(" ", selectVariables),
                        whereClauseVariables.stream().map(s -> s + " ?a ?b . ").
                                collect(Collectors.joining(" ")),
                        input.getSolutionModifier());
                QueryFactory.create(testQuery);
            } catch (QueryParseException e) {
                throw new InvalidDivideQueryParserInputException(
                        "Defined solution modifier is no valid SPARQL");
            }
        }

        // validate variables used in stream window definitions
        // -> first parse to check if they should be mapped to a new variable
        //    based on the preprocessing
        // -> then check if antecedent of sensor query rule will contain this variable,
        //    OR that a default value is specified for this variable in the config
        List parsedStreamWindows = new ArrayList<>();
        for (StreamWindow streamWindow : input.getStreamWindows()) {
            ParsedStreamWindow parsedStreamWindow =
                    parseStreamWindow(streamWindow, input.getVariableMapping());

            List unboundVariablesInContext =
                    findUnboundVariables(parsedStreamQueryWhereClause.getContextPart());
            for (String unboundVariable : parsedStreamWindow.getUnboundVariables()) {
                if (parsedStreamWindow.getDefaultWindowParameterValues().containsKey(unboundVariable)) {
                    // a default value exists -> the variable may NOT also occur in
                    // the context (the two options are mutually exclusive)
                    if (unboundVariablesInContext.contains(unboundVariable)) {
                        throw new InvalidDivideQueryParserInputException(String.format(
                                "Variables defined in the stream window parameters should either occur " +
                                        "in the context part of the stream query (in order to be able " +
                                        "to be substituted during the query derivation), OR a default " +
                                        "value for this variable should be specified in the " +
                                        "configuration. For variable %s, the first condition is " +
                                        "fulfilled, so a default value cannot be specified in the " +
                                        "configuration.", input.getReverseVariableMapping().getOrDefault(
                                        unboundVariable, unboundVariable)));
                    }
                } else {
                    // no default value -> the variable MUST occur in the context part
                    if (!unboundVariablesInContext.contains(unboundVariable)) {
                        throw new InvalidDivideQueryParserInputException(String.format(
                                "Variables defined in the stream window parameters should either occur " +
                                        "in the context part of the stream query (in order to be able " +
                                        "to be substituted during the query derivation), OR a default " +
                                        "value for this variable should be specified in the " +
                                        "configuration. For variable %s, the first condition is not " +
                                        "fulfilled, so a default value should be specified in the " +
                                        "configuration.", input.getReverseVariableMapping().getOrDefault(
                                        unboundVariable, unboundVariable)));
                    }
                }
            }

            parsedStreamWindows.add(parsedStreamWindow);
        }

        // declare variables which need to be initialized differently
        // based on the queries in the parser input
        String resultingQueryOutput;
        QueryForm resultingQueryForm;
        String goal;
        List intermediateQueries = new ArrayList<>();
        Set queryPatternPrefixes;
        Set sensorQueryRulePrefixes;

        // if no final query is present, the streaming query is the only input
        // (there can also be no intermediate queries without a final query)
        if (input.getFinalQuery() == null) {
            // you already know it is a CONSTRUCT query, otherwise it will have been
            // transformed to a new input above

            // in that case, the original output of the streaming query is also
            // the output of the RSP-QL query generated with DIVIDE
            // (and similarly for the form of this query)
            resultingQueryOutput = parsedStreamQuery.getSplitSparqlQuery().getResultPart();
            resultingQueryForm = parsedStreamQuery.getSplitSparqlQuery().getQueryForm();

            // in this case, the query pattern prefixes can simply be the prefixes used
            // in the streaming query & sensor query rule
            queryPatternPrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
            sensorQueryRulePrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());

            // in this case, the reasoner goal for DIVIDE is simply this query output
            // in both antecedent & consequence
            goal = divideQueryGenerator.createGoal(
                    parsedStreamQuery.getPrefixes(),
                    resultingQueryOutput,
                    resultingQueryOutput);

        } else {
            // if a final query is present, it should be ensured that the stream query
            // is of CONSTRUCT form (only the final query can have another form)
            if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() != QueryForm.CONSTRUCT) {
                throw new InvalidDivideQueryParserInputException(
                        "Stream query should be a CONSTRUCT query if another " +
                                "final query is specified");
            }

            // parse final query
            ParsedSparqlQuery parsedFinalQuery = parseSparqlQuery(input.getFinalQuery());

            // check if WHERE clause exists
            if (parsedFinalQuery.getSplitSparqlQuery().getWherePart() == null ||
                    parsedFinalQuery.getSplitSparqlQuery().getWherePart().trim().isEmpty()) {
                throw new InvalidDivideQueryParserInputException(
                        String.format("Final query of %s form should have a non-empty " +
                                        "WHERE clause.%s", parsedFinalQuery.getSplitSparqlQuery().getQueryForm(),
                                parsedFinalQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.ASK ?
                                        " For a final ASK query, the WHERE keyword should be " +
                                                "explicitly mentioned." : ""));
            }

            // check if result part is empty for ASK queries (= required!)
            if (parsedFinalQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.ASK &&
                    parsedFinalQuery.getSplitSparqlQuery().getResultPart() != null &&
                    !parsedFinalQuery.getSplitSparqlQuery().getResultPart().trim().isEmpty()) {
                throw new InvalidDivideQueryParserInputException(
                        "Final query of ASK form should fulfill regex 'ASK (FROM .*)* WHERE {.*}'.");
            }

            // the final query may not contain any FROM definitions
            if (parsedFinalQuery.getSplitSparqlQuery().getFromPart() != null &&
                    !parsedFinalQuery.getSplitSparqlQuery().getFromPart().trim().isEmpty()) {
                throw new InvalidDivideQueryParserInputException(
                        "Final query cannot contain any FROM parts");
            }

            // check if final query has no final part
            if (parsedFinalQuery.getSplitSparqlQuery().getFinalPart() != null &&
                    !parsedFinalQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
                throw new InvalidDivideQueryParserInputException(
                        "Input queries cannot contain any solution modifiers, since this" +
                                " cannot be preserved by DIVIDE (because individual" +
                                " instantiated queries are generated). Any solution modifier" +
                                " for the queries derived by DIVIDE can be defined as a" +
                                " separate input entry.");
            }

            // ensure no conflicts exist between parsed final query & prefixes in stream query
            parsedFinalQuery = solvePrefixConflicts(
                    parsedStreamQuery.getPrefixes(), parsedFinalQuery);

            // in this case, the original output of the final query is also
            // the output of the RSP-QL query generated with DIVIDE
            // (and similarly for the form of this query)
            resultingQueryOutput = parsedFinalQuery.getSplitSparqlQuery().getResultPart();
            resultingQueryForm = parsedFinalQuery.getSplitSparqlQuery().getQueryForm();

            // in this case, the prefixes of both the stream & final query need
            // to be merged to be used for the query pattern
            queryPatternPrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());
            queryPatternPrefixes.addAll(parsedFinalQuery.getPrefixes());

            // the sensor query rule prefixes set should only contain the prefixes
            // of the stream query (potentially added later with the prefixes of
            // any intermediate queries)
            sensorQueryRulePrefixes = new HashSet<>(parsedStreamQuery.getPrefixes());

            // in this case, the goal is constructed differently based on the query form:
            if (resultingQueryForm == QueryForm.CONSTRUCT) {
                // in case of a CONSTRUCT query, the goal takes the WHERE clause of
                // the final query as antecedent, and the output of the CONSTRUCT
                // query (i.e., the CONSTRUCT clause) as consequence
                goal = divideQueryGenerator.createGoal(
                        parsedFinalQuery.getPrefixes(),
                        parsedFinalQuery.getSplitSparqlQuery().getWherePart(),
                        resultingQueryOutput);

            } else { // QueryForm.ASK, QueryForm.DESCRIBE or QueryForm.SELECT
                // in case of a SELECT, ASK or DESCRIBE query, both the antecedent and
                // the consequence of the goal are set to the WHERE clause of the final query
                goal = divideQueryGenerator.createGoal(
                        parsedFinalQuery.getPrefixes(),
                        parsedFinalQuery.getSplitSparqlQuery().getWherePart(),
                        parsedFinalQuery.getSplitSparqlQuery().getWherePart());
            }

            // if intermediate queries are provided, they should be parsed and
            // added to the inputs for the creation of the sensor query rule
            if (!input.getIntermediateQueries().isEmpty()) {
                for (String intermediateQueryString : input.getIntermediateQueries()) {
                    // split intermediate query
                    ParsedSparqlQuery parsedIntermediateQuery =
                            parseSparqlQuery(intermediateQueryString);

                    // check if intermediate query has no final part
                    if (parsedIntermediateQuery.getSplitSparqlQuery().getFinalPart() != null &&
                            !parsedIntermediateQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) {
                        throw new InvalidDivideQueryParserInputException(
                                "Input queries cannot contain any solution modifiers, since this" +
                                        " cannot be preserved by DIVIDE (because individual" +
                                        " instantiated queries are generated). Any solution modifier" +
                                        " for the queries derived by DIVIDE can be defined as a" +
                                        " separate input entry.");
                    }

                    // ensure no conflicts exist between parsed intermediate query & prefixes
                    // in stream & final query
                    parsedIntermediateQuery = solvePrefixConflicts(
                            queryPatternPrefixes, parsedIntermediateQuery);

                    // add prefixes to prefixes used for sensor query rule
                    sensorQueryRulePrefixes.addAll(parsedIntermediateQuery.getPrefixes());

                    // ensure that intermediate query is of CONSTRUCT form (only the final query
                    // can have another form)
                    if (parsedIntermediateQuery.getSplitSparqlQuery().getQueryForm()
                            != QueryForm.CONSTRUCT) {
                        throw new InvalidDivideQueryParserInputException(
                                "Intermediate queries should always be CONSTRUCT queries");
                    }

                    // save intermediate query
                    intermediateQueries.add(parsedIntermediateQuery);
                }
            }
        }

        // convert the parsed stream windows into a set of converted stream windows
        List convertedStreamWindows = new ArrayList<>();
        for (ParsedStreamWindow parsedStreamWindow : parsedStreamWindows) {
            convertedStreamWindows.add(convertParsedStreamWindow(parsedStreamWindow));
        }

        // generate RSP-QL query based on parsing output
        RspQlQueryBody rspQlQueryBody = divideQueryGenerator.createRspQlQueryBody(
                resultingQueryForm,
                resultingQueryOutput,
                parsedStreamQueryWhereClause.getStreamItems(),
                input.getSolutionModifier(),
                convertedStreamWindows,
                this);

        // retrieve input variables for sensor query rule
        List inputVariables = retrieveInputVariables(
                parsedStreamQueryWhereClause.getContextPart(),
                rspQlQueryBody.getUnboundVariables());

        // check unbound variables of generated RSP-QL query body
        // (note: the boolean field and the method deliberately share this name;
        //  Java resolves them in different namespaces)
        if (validateUnboundVariablesInRspQlQueryBody) {
            validateUnboundVariablesInRspQlQueryBody(
                    rspQlQueryBody, inputVariables, input.getReverseVariableMapping(),
                    input.getFinalQueryVariableMapping());
        }

        // check that solution modifier does not contain an input variable
        if (inputVariables.stream().anyMatch(
                solutionModifierVariables::contains)) {
            throw new InvalidDivideQueryParserInputException(
                    "Solution modifier contains variable that will be instantiated " +
                            "by the DIVIDE query derivation");
        }

        // check that solution modifier only contains variables that are occurring
        // in the RSP-QL query body
        if (!findUnboundVariables(rspQlQueryBody.getQueryBody().replace(
                input.getSolutionModifier(), ""))
                .containsAll(solutionModifierVariables)) {
            throw new InvalidDivideQueryParserInputException(
                    "Solution modifier contains variables that do not occur in the " +
                            "instantiated RSP-QL query body");
        }

        // save some variables that might or might not be updated below
        String sensorQueryRuleContextPart = parsedStreamQueryWhereClause.getContextPart();
        List parsedStreamQueryWhereClauseStreamItems =
                parsedStreamQueryWhereClause.getStreamItems();
        String parsedStreamQueryResultPart =
                parsedStreamQuery.getSplitSparqlQuery().getResultPart();

        // check to update RSP-QL body string for SELECT queries
        if (resultingQueryForm == QueryForm.SELECT) {
            // retrieve SELECT variables from output
            List selectVariables = parseSelectClause(resultingQueryOutput);

            // adaptations are only needed if any of the select variables is an input
            // variable of the sensor query rule (because then it will be substituted)
            List selectInputVariables = selectVariables
                    .stream()
                    .filter(inputVariables::contains)
                    .collect(Collectors.toList());
            if (!selectInputVariables.isEmpty()) {
                // calculate all input variables in the DIVIDE parser input
                Set allInputVariables = input.getUnboundVariables();

                // generate a random DIVIDE variable for all SELECT input variables
                Map variableMapping = new HashMap<>();
                for (String selectInputVariable : selectInputVariables) {
                    String newVariable = null;
                    boolean variableAccepted = false;
                    while (!variableAccepted) {
                        String triedNewVariable = generateRandomUnboundVariable();
                        // there may be no existing input variable that is contained in this
                        // possible new input variable
                        variableAccepted = allInputVariables
                                .stream()
                                .noneMatch(triedNewVariable::contains);
                        if (variableAccepted) {
                            newVariable = triedNewVariable;
                        }
                    }
                    variableMapping.put(selectInputVariable, newVariable);
                }

                // update list of input variables
                inputVariables = inputVariables
                        .stream()
                        .map(s -> variableMapping.getOrDefault(s, s))
                        .collect(Collectors.toList());

                // update sensor query rule context & consequence
                for (String selectInputVariable : selectInputVariables) {
                    sensorQueryRuleContextPart = sensorQueryRuleContextPart.replace(
                            selectInputVariable, variableMapping.get(selectInputVariable));
                    parsedStreamQueryResultPart = parsedStreamQueryResultPart.replace(
                            selectInputVariable, variableMapping.get(selectInputVariable));
                }

                // update stream windows
                List newConvertedStreamWindows = new ArrayList<>();
                for (ConvertedStreamWindow convertedStreamWindow : convertedStreamWindows) {
                    String iri =
convertedStreamWindow.getStreamIri(); + String windowDefinition = convertedStreamWindow.getWindowDefinition(); + for (String selectInputVariable : selectInputVariables) { + windowDefinition = windowDefinition.replaceAll( + Pattern.quote(String.format("?{%s}", selectInputVariable.substring(1))), + String.format("?{%s}", variableMapping.get(selectInputVariable).substring(1))); + } + List windowParameters = convertedStreamWindow.getWindowParameters(); + windowParameters = windowParameters + .stream() + .map(wp -> new WindowParameter( + variableMapping.getOrDefault(wp.getVariable(), wp.getVariable()), + wp.isValueSubstitutionVariable() ? + variableMapping.getOrDefault(wp.getVariable(), wp.getVariable()) : + wp.getValue(), + wp.getType(), + wp.isValueSubstitutionVariable())) + .collect(Collectors.toList()); + newConvertedStreamWindows.add( + new ConvertedStreamWindow(iri, windowDefinition, windowParameters)); + } + convertedStreamWindows = new ArrayList<>(newConvertedStreamWindows); + + // update RSP-QL query body + String solutionModifier = input.getSolutionModifier(); + for (String selectInputVariable : selectInputVariables) { + solutionModifier = solutionModifier.replace( + selectInputVariable, variableMapping.get(selectInputVariable)); + } + resultingQueryOutput = selectVariables + .stream() + .map(s -> variableMapping.containsKey(s) + ? 
String.format("(%s AS %s)", variableMapping.get(s), s) + : s) + .collect(Collectors.joining(" ")); + List whereClauseStreamItems = new ArrayList<>(); + for (WhereClauseItem item : parsedStreamQueryWhereClauseStreamItems) { + if (item.getItemType() == WhereClauseItemType.EXPRESSION) { + WhereClauseExpressionItem expressionItem = (WhereClauseExpressionItem) item; + String expression = expressionItem.getExpression(); + for (String selectInputVariable : selectInputVariables) { + expression = expression.replace( + selectInputVariable, variableMapping.get(selectInputVariable)); + } + whereClauseStreamItems.add(new WhereClauseExpressionItem(expression)); + + } else if (item.getItemType() == WhereClauseItemType.GRAPH) { + WhereClauseGraphItem graphItem = (WhereClauseGraphItem) item; + Graph graph = graphItem.getGraph(); + String expression = graph.getClause(); + for (String selectInputVariable : selectInputVariables) { + expression = expression.replace( + selectInputVariable, variableMapping.get(selectInputVariable)); + } + whereClauseStreamItems.add(new WhereClauseGraphItem( + new Graph(graph.getName(), expression))); + } + } + parsedStreamQueryWhereClauseStreamItems = new ArrayList<>(whereClauseStreamItems); + rspQlQueryBody = divideQueryGenerator.createRspQlQueryBody( + resultingQueryForm, + resultingQueryOutput, + whereClauseStreamItems, + solutionModifier, + convertedStreamWindows, + this); + } + } + + // update output to be used for sensor query + String sensorQueryRuleResult = extendOutputOfStreamQueryForSensorQueryRule( + parsedStreamQueryWhereClauseStreamItems, + parsedStreamQueryResultPart, + sensorQueryRulePrefixes); + + // generate query pattern based on RSP-QL query body and parsing output + String queryPattern = divideQueryGenerator.createQueryPattern( + resultingQueryForm, + queryPatternPrefixes, + rspQlQueryBody.getQueryBody()); + + // retrieve output variables for sensor query rule + List outputVariables = retrieveOutputVariables( + 
sensorQueryRuleContextPart, + sensorQueryRuleResult); + + // generate sensor query rule + List allWindowParameters = new ArrayList<>(); + for (ConvertedStreamWindow convertedStreamWindow : convertedStreamWindows) { + allWindowParameters.addAll(convertedStreamWindow.getWindowParameters()); + } + String sensorQueryRule = divideQueryGenerator.createSensorQueryRule( + sensorQueryRulePrefixes, + sensorQueryRuleContextPart, + sensorQueryRuleResult, + inputVariables, + allWindowParameters, + outputVariables, + intermediateQueries); + + return new DivideQueryParserOutput( + queryPattern, sensorQueryRule, goal, resultingQueryForm); + } + + private DivideQueryParserOutput parseDivideQueryFromRspQlQuery(CleanDivideQueryParserInput input) + throws InvalidDivideQueryParserInputException { + // only the main stream query should be considered in this case + // window parameters are taken from the query + + // parse the RSP-QL stream query + ParsedSparqlQuery parsedStreamQuery = parseRspQlQuery(input.getStreamQuery()); + + // check if stream query has no final part + if (parsedStreamQuery.getSplitSparqlQuery().getFinalPart() != null && + !parsedStreamQuery.getSplitSparqlQuery().getFinalPart().trim().isEmpty()) { + throw new InvalidDivideQueryParserInputException( + "Input queries cannot contain any solution modifiers, since this" + + " cannot be preserved by DIVIDE (because individual" + + " instantiated queries are generated). 
Any solution modifier" + + " for the queries derived by DIVIDE can be defined as a" + + " separate input entry."); + } + + // remove any specified default graph from the SPARQL query + String streamQueryFromPart = parsedStreamQuery.getSplitSparqlQuery().getFromPart(); + Matcher m = RSP_QL_FROM_DEFAULT_GRAPH_PATTERN.matcher(streamQueryFromPart); + while (m.find()) { + streamQueryFromPart = streamQueryFromPart.replace(m.group().trim(), ""); + } + + // retrieve the graph names & stream windows used in the FROM clauses of this SPARQL query + Pair, String> inputGraphNamesResult = retrieveGraphNamesFromRspQlFromPart( + streamQueryFromPart, + parsedStreamQuery.getPrefixes()); + List inputGraphNames = inputGraphNamesResult.getLeft(); + String streamQueryFromPartLeftover = inputGraphNamesResult.getRight(); + Map streamWindowMap = + completeStreamWindowsFromRspQlFromPart( + input.getStreamWindows(), + streamQueryFromPartLeftover, + parsedStreamQuery.getPrefixes()); + inputGraphNames.addAll(streamWindowMap.keySet()); + + // only allow CONSTRUCT RSP-QL queries + // -> if they are of other form, they are translated to SPARQL and + // further parsed as if they were a SPARQL query + if (parsedStreamQuery.getSplitSparqlQuery().getQueryForm() != QueryForm.CONSTRUCT) { + // create SPARQL FROM part + StringBuilder sparqlFromPart = new StringBuilder(); + for (String inputGraphName : inputGraphNames) { + sparqlFromPart.append(String.format("FROM NAMED %s ", + streamWindowMap.containsKey(inputGraphName) + ? 
streamWindowMap.get(inputGraphName).getStreamIri() + : inputGraphName)); + } + + // create SPARQL WHERE clause + Matcher m1 = Pattern.compile("WINDOW\\s+(\\S+)").matcher( + parsedStreamQuery.getSplitSparqlQuery().getWherePart()); + String sparqlWhereClause = parsedStreamQuery.getSplitSparqlQuery().getWherePart(); + while (m1.find()) { + sparqlWhereClause = sparqlWhereClause.replaceFirst( + m1.group(), + String.format("GRAPH %s", streamWindowMap.get( + resolveGraphName(m1.group(1), + parsedStreamQuery.getPrefixes())).getStreamIri())); + } + + // translate RSP-QL stream query to SPARQL + String sparqlStreamQuery = String.format("%s %s %s %s WHERE { %s } %s", + parsedStreamQuery.getSplitSparqlQuery().getPrefixPart(), + parsedStreamQuery.getSplitSparqlQuery().getQueryForm().toString(), + parsedStreamQuery.getSplitSparqlQuery().getResultPart(), + sparqlFromPart, + sparqlWhereClause, + parsedStreamQuery.getSplitSparqlQuery().getFinalPart()); + + // construct new SPARQL input + CleanDivideQueryParserInput newInput = new CleanDivideQueryParserInput( + InputQueryLanguage.SPARQL, + new ArrayList<>(streamWindowMap.values()), + sparqlStreamQuery, + new ArrayList<>(), + null, + input.getSolutionModifier(), + input.getVariableMapping()); + newInput.setUnboundVariables(input.getUnboundVariables()); + newInput.setFinalQueryVariableMapping(input.getFinalQueryVariableMapping()); + newInput.preprocess(); + + print("RSP-QL query has no CONSTRUCT form => converted to SPARQL " + + "=> new input:\n" + newInput); + + return parseDivideQueryFromSparqlQueries(newInput); + } + + // parse the WHERE clause based on the used prefixes & defined input graph names + WhereClause streamQueryWhereClause = parseWhereClauseOfQuery( + parsedStreamQuery.getSplitSparqlQuery().getWherePart(), + parsedStreamQuery.getPrefixes(), + inputGraphNames, + InputQueryLanguage.RSP_QL); + + // validate stream query + validateSparqlQuery(String.format("%s CONSTRUCT { %s } WHERE { %s }", + 
parsedStreamQuery.getPrefixes() + .stream() + .map(prefix -> String.format("PREFIX %s %s", + prefix.getName(), prefix.getUri())) + .collect(Collectors.joining(" ")), + parsedStreamQuery.getSplitSparqlQuery().getResultPart(), + streamQueryWhereClause.getItems() + .stream() + .map(WhereClauseItem::getClause) + .collect(Collectors.joining(" "))), + "Stream"); + + // loop over WHERE clause items and adapt graph expression items: + // use actual graph name instead of window name + List newStreamQueryWhereClauseItems = new ArrayList<>(); + for (WhereClauseItem item : streamQueryWhereClause.getItems()) { + if (item.getItemType() == WhereClauseItemType.GRAPH) { + WhereClauseGraphItem graphItem = (WhereClauseGraphItem) item; + if (streamWindowMap.containsKey(graphItem.getGraph().getName())) { + newStreamQueryWhereClauseItems.add(new WhereClauseGraphItem( + new Graph(streamWindowMap.get(graphItem.getGraph().getName()).getStreamIri(), + graphItem.getGraph().getClause()))); + } else { + newStreamQueryWhereClauseItems.add(item); + } + } else { + newStreamQueryWhereClauseItems.add(item); + } + } + streamQueryWhereClause = new WhereClause(newStreamQueryWhereClauseItems); + + // parse where clause of stream query + ParsedStreamQueryWhereClause parsedStreamQueryWhereClause = + parseStreamQueryWhereClauseOfQuery( + streamQueryWhereClause, + streamWindowMap.values() + .stream() + .map(StreamWindow::getStreamIri) + .collect(Collectors.toList())); + + // validate parsed where clause of stream query: there should be at least + // 1 graph on a stream IRI (otherwise there is no point of constructing + // RSP queries with DIVIDE) + if (parsedStreamQueryWhereClause.getStreamItems() + .stream() + .noneMatch(whereClauseItem + -> whereClauseItem.getItemType() == WhereClauseItemType.GRAPH)) { + throw new InvalidDivideQueryParserInputException( + "Stream query should at least contain 1 graph on stream IRI in WHERE clause"); + } + + // validate defined solution modifier + List 
solutionModifierVariables = new ArrayList<>(); + if (!input.getSolutionModifier().trim().isEmpty()) { + solutionModifierVariables.addAll( + findUnboundVariables(input.getSolutionModifier())); + try { + List selectVariables = new ArrayList<>(); + List whereClauseVariables = new ArrayList<>(); + Matcher solutionModifierMatcher = + GROUP_BY_PATTERN.matcher(input.getSolutionModifier()); + if (solutionModifierMatcher.find()) { + selectVariables.addAll(findUnboundVariables(solutionModifierMatcher.group())); + whereClauseVariables.addAll(solutionModifierVariables); + } else { + if (solutionModifierVariables.isEmpty()) { + selectVariables.add("?x"); + } else { + selectVariables.addAll(solutionModifierVariables); + } + whereClauseVariables.addAll(selectVariables); + } + String testQuery = String.format("SELECT %s WHERE { %s } %s", + String.join(" ", selectVariables), + whereClauseVariables.stream().map(s -> s + " ?a ?b . "). + collect(Collectors.joining(" ")), + input.getSolutionModifier()); + QueryFactory.create(testQuery); + } catch (QueryParseException e) { + throw new InvalidDivideQueryParserInputException( + "Defined solution modifier is no valid SPARQL"); + } + } + + // validate variables used in stream window definitions + // -> first parse to check if they should be mapped to a new variable + // based on the preprocessing + // -> then check if antecedent of sensor query rule will contain this variable, + // OR that a default value is specified for this variable in the config + List parsedStreamWindows = new ArrayList<>(); + for (StreamWindow streamWindow : streamWindowMap.values()) { + ParsedStreamWindow parsedStreamWindow = + parseStreamWindow(streamWindow, input.getVariableMapping()); + + List unboundVariablesInContext = + findUnboundVariables(parsedStreamQueryWhereClause.getContextPart()); + for (String unboundVariable : parsedStreamWindow.getUnboundVariables()) { + if (parsedStreamWindow.getDefaultWindowParameterValues().containsKey(unboundVariable)) { + if 
(unboundVariablesInContext.contains(unboundVariable)) { + throw new InvalidDivideQueryParserInputException(String.format( + "Variables defined in the stream window parameters should either occur " + + "in the context part of the stream query (in order to be able " + + "to be substituted during the query derivation), OR a default " + + "value for this variable should be specified in the " + + "configuration. For variable %s, the first condition is " + + "fulfilled, so a default value cannot be specified in the " + + "configuration.", input.getReverseVariableMapping().getOrDefault( + unboundVariable, unboundVariable))); + } + } else { + if (!unboundVariablesInContext.contains(unboundVariable)) { + throw new InvalidDivideQueryParserInputException(String.format( + "Variables defined in the stream window parameters should either occur " + + "in the context part of the stream query (in order to be able " + + "to be substituted during the query derivation), OR a default " + + "value for this variable should be specified in the " + + "configuration. 
For variable %s, the first condition is not " + + "fulfilled, so a default value should be specified in the " + + "configuration.", input.getReverseVariableMapping().getOrDefault( + unboundVariable, unboundVariable))); + } + } + } + + parsedStreamWindows.add(parsedStreamWindow); + } + + // declare variables which need to be initialized + // based on the queries in the parser input + String resultingQueryOutput; + QueryForm resultingQueryForm; + String goal; + List intermediateQueries = new ArrayList<>(); + Set queryPatternPrefixes; + Set sensorQueryRulePrefixes; + + // the original output of the streaming query is also + // the output of the RSP-QL query generated with DIVIDE + // (and similarly for the form of this query) + resultingQueryOutput = parsedStreamQuery.getSplitSparqlQuery().getResultPart(); + resultingQueryForm = parsedStreamQuery.getSplitSparqlQuery().getQueryForm(); + + // in this case, the query pattern prefixes can simply be the prefixes used + // in the streaming query & sensor query rule + queryPatternPrefixes = new HashSet<>(parsedStreamQuery.getPrefixes()); + sensorQueryRulePrefixes = new HashSet<>(parsedStreamQuery.getPrefixes()); + + // in this case, the reasoner goal for DIVIDE is simply this query output + // in both antecedent & consequence + goal = divideQueryGenerator.createGoal( + parsedStreamQuery.getPrefixes(), + resultingQueryOutput, + resultingQueryOutput); + + // convert the parsed stream windows into a set of converted stream windows + List convertedStreamWindows = new ArrayList<>(); + for (ParsedStreamWindow parsedStreamWindow : parsedStreamWindows) { + convertedStreamWindows.add(convertParsedStreamWindow(parsedStreamWindow)); + } + + // generate RSP-QL query based on parsing output + RspQlQueryBody rspQlQueryBody = divideQueryGenerator.createRspQlQueryBody( + resultingQueryForm, + resultingQueryOutput, + parsedStreamQueryWhereClause.getStreamItems(), + input.getSolutionModifier(), + convertedStreamWindows, + this); + + // 
generate query pattern based on RSP-QL query body and parsing output + // -> first, merge set of prefixes from + String queryPattern = divideQueryGenerator.createQueryPattern( + resultingQueryForm, + queryPatternPrefixes, + rspQlQueryBody.getQueryBody()); + + // update output to be used for sensor query + String sensorQueryResult = extendOutputOfStreamQueryForSensorQueryRule( + parsedStreamQueryWhereClause.getStreamItems(), + parsedStreamQuery.getSplitSparqlQuery().getResultPart(), + sensorQueryRulePrefixes); + + // retrieve input and output variables for sensor query rule + List inputVariables = retrieveInputVariables( + parsedStreamQueryWhereClause.getContextPart(), + rspQlQueryBody.getUnboundVariables()); + List outputVariables = retrieveOutputVariables( + parsedStreamQueryWhereClause.getContextPart(), + sensorQueryResult); + + // check unbound variables of generated RSP-QL query body + if (validateUnboundVariablesInRspQlQueryBody) { + validateUnboundVariablesInRspQlQueryBody( + rspQlQueryBody, inputVariables, input.getReverseVariableMapping(), + input.getFinalQueryVariableMapping()); + } + + // check that solution modifier does not contain an input variable + if (inputVariables.stream().anyMatch( + solutionModifierVariables::contains)) { + throw new InvalidDivideQueryParserInputException( + "Solution modifier contains variable that will be instantiated " + + "by the DIVIDE query derivation"); + } + + // check that solution modifier only contains variables that are occurring + // in the RSP-QL query body + if (!findUnboundVariables(rspQlQueryBody.getQueryBody().replace( + input.getSolutionModifier(), "")) + .containsAll(solutionModifierVariables)) { + throw new InvalidDivideQueryParserInputException( + "Solution modifier contains variables that do not occur in the " + + "instantiated RSP-QL query body"); + } + + // generate sensor query rule + List allWindowParameters = new ArrayList<>(); + for (ConvertedStreamWindow convertedStreamWindow : 
convertedStreamWindows) { + allWindowParameters.addAll(convertedStreamWindow.getWindowParameters()); + } + String sensorQueryRule = divideQueryGenerator.createSensorQueryRule( + sensorQueryRulePrefixes, + parsedStreamQueryWhereClause.getContextPart(), + sensorQueryResult, + inputVariables, + allWindowParameters, + outputVariables, + intermediateQueries); + + return new DivideQueryParserOutput( + queryPattern, sensorQueryRule, goal, resultingQueryForm); + } + + /** + * Solves prefix conflicts in a given parsed SPARQL query. + * To do so, the method checks whether any of the prefix names in the given + * set of existing prefixes occurs somewhere in the SPARQL query. If this is + * the case, the corresponding URI should be identical to the existing prefix. + * If not, the parsed SPARQL query should be updated: a new prefix should be + * created and used in the corresponding query parts. + * + * @param existingPrefixes set of prefixes to which the prefixes in the given + * SPARQL query should be compared and solved in case + * of conflicts + * @param parsedSparqlQuery parsed SPARQL query which needs to be checked for + * any prefix conflicts + * @return new parsed SPARQL query in which all possible prefix conflicts + * are resolved + */ + private ParsedSparqlQuery solvePrefixConflicts(Set existingPrefixes, + ParsedSparqlQuery parsedSparqlQuery) { + // create a list of conflicting prefixes in the given SPARQL query + Set conflictingPrefixes = new HashSet<>(); + for (Prefix prefix : parsedSparqlQuery.getPrefixes()) { + for (Prefix existingPrefix : existingPrefixes) { + // there is a conflict if a prefix of the given SPARQL query has the + // same name as an existing prefix, but another URI + if (existingPrefix.getName().equals(prefix.getName()) && + !existingPrefix.getUri().equals(prefix.getUri())) { + conflictingPrefixes.add(prefix); + break; + } + } + } + + // if there are no conflicts, the same parsed SPARQL query can be returned + if (conflictingPrefixes.isEmpty()) { 
+ return parsedSparqlQuery; + + } else { + // otherwise, the conflicting prefixes should be given a different name + // -> start from current fields + String prefixPart = parsedSparqlQuery.getSplitSparqlQuery().getPrefixPart(); + String resultPart = parsedSparqlQuery.getSplitSparqlQuery().getResultPart(); + String wherePart = parsedSparqlQuery.getSplitSparqlQuery().getWherePart(); + Set prefixes = new HashSet<>(parsedSparqlQuery.getPrefixes()); + + for (Prefix conflictingPrefix : conflictingPrefixes) { + String newPrefixName = null; + Prefix newPrefix = null; + + // check if a prefix with the same URI already existed + for (Prefix existingPrefix : existingPrefixes) { + if (existingPrefix.getUri().equals(conflictingPrefix.getUri())) { + // if so, this one can be reused! + newPrefixName = existingPrefix.getName(); + newPrefix = existingPrefix; + } + } + // if not, create a new prefix with new name and same URI + if (newPrefixName == null) { + newPrefixName = String.format("newPrefix%d:", PREFIX_COUNTER++); + newPrefix = new Prefix(newPrefixName, conflictingPrefix.getUri()); + } + + // replace prefix name in existing query parts + Pattern replacingPattern = + Pattern.compile("(\\s|\\(|^|\\^)" + conflictingPrefix.getName()); + Matcher m1 = replacingPattern.matcher(prefixPart); + prefixPart = m1.replaceAll("$1" + newPrefixName); + if ((parsedSparqlQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.CONSTRUCT || + parsedSparqlQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.DESCRIBE || + parsedSparqlQuery.getSplitSparqlQuery().getQueryForm() == QueryForm.SELECT) + && resultPart != null) { + Matcher m2 = replacingPattern.matcher(resultPart); + resultPart = m2.replaceAll("$1" + newPrefixName); + } + if (wherePart != null) { + Matcher m3 = replacingPattern.matcher(wherePart); + wherePart = m3.replaceAll("$1" + newPrefixName); + } + + // update set of prefixes + prefixes.remove(conflictingPrefix); + prefixes.add(newPrefix); + } + + // return updated query + return 
new ParsedSparqlQuery( + new SplitSparqlQuery( + prefixPart, + parsedSparqlQuery.getSplitSparqlQuery().getQueryForm(), + resultPart, + parsedSparqlQuery.getSplitSparqlQuery().getFromPart(), + wherePart, + parsedSparqlQuery.getSplitSparqlQuery().getFinalPart()), + prefixes); + } + } + + /** + * Validates the occurrence of unbound variables in the RSP-QL query body generated + * by this parser. If validation succeeds, this method returns after performing its + * checks. If validation fails, a {@link InvalidDivideQueryParserInputException} is + * thrown. + * Validation fails if the RSP-QL query body contains variables in its result part + * that are not occurring in the WHERE clause, and also not in the input variables + * defined for substitution for the query derivation. If such variables exist, they + * will lead to errors when registering a query. + * Validation also fails if the RSP-QL query body is a SELECT query which contains + * "... AS ?var" expressions, where ?var is already occurring as variable name in + * the WHERE clause of the query OR in the list of input variables. 
+ * + * @param rspQlQueryBody RSP-QL query body that is about to be validated + * @param inputVariables input variables for the DIVIDE query derivation, that may + * occur in the RSP-QL query body but will later be substituted + * @param outputMapping mapping of variables in the parser input to the original input + * variables (needs to be provided to give a clear error message + * to the end user about the problematic variables, is not used + * as such for the validation) + * @throws InvalidDivideQueryParserInputException when validation fails + */ + private void validateUnboundVariablesInRspQlQueryBody(RspQlQueryBody rspQlQueryBody, + List inputVariables, + Map outputMapping, + Map finalQueryMapping) + throws InvalidDivideQueryParserInputException { + print("VALIDATING UNBOUND VARIABLES IN RSP-QL QUERY BODY"); + + // obtain all unbound variables in WHERE clause + Set unboundVariablesInWherePart = + new HashSet<>(findUnboundVariables(rspQlQueryBody.getWherePart())); + + // obtain all unbound variables in result part + // -> special caveat required for SELECT queries + Pair, Set> unboundVariablesInResultPart = + findUnboundVariablesInQueryResultPart( + rspQlQueryBody.getResultPart(), rspQlQueryBody.getQueryForm()); + Set expectedUnboundVariablesInResultPart = unboundVariablesInResultPart.getLeft(); + Set forbiddenUnboundVariablesInResultPart = unboundVariablesInResultPart.getRight(); + + print("Output mapping: " + outputMapping); + print("Final query variable mapping: " + finalQueryMapping); + + print("Mappings for expected unbound variables in result part " + + "(var -> output mapping -> final query variable mapping)"); + for (String s : expectedUnboundVariablesInResultPart) { + String m1 = outputMapping.getOrDefault(s, s); + String m2 = finalQueryMapping.getOrDefault(m1, m1); + print(String.format(" %s - %s - %s", s, m1, m2)); + } + + print("Forbidden variables: " + forbiddenUnboundVariablesInResultPart); + print("WHERE clause: " + rspQlQueryBody.getWherePart()); 
+ print("Unbound variables in WHERE clause: " + unboundVariablesInWherePart); + // validate that none of the forbidden variables occurs in the WHERE clause + List problematicVariables = forbiddenUnboundVariablesInResultPart + .stream() + .filter(s -> unboundVariablesInWherePart.contains(s) + || inputVariables.contains(s) + || expectedUnboundVariablesInResultPart.contains(s)) + .map(s -> outputMapping.getOrDefault(s, s)) + .collect(Collectors.toList()); + if (!problematicVariables.isEmpty()) { + throw new InvalidDivideQueryParserInputException(String.format( + "The SELECT clause of the resulting RSP-QL query body " + + "will contain template variables that are not allowed in the " + + "WHERE clause, but that are present there: %s. This is probably " + + "caused by an invalid SELECT clause in the stream or final query. " + + "Make sure this clause is valid. Also make sure that a correct " + + "mapping file is provided, and/or that variables with identical " + + "names should be mapped (if this is enabled via the settings).", + String.join(", ", problematicVariables))); + } + + print("Validating unbound variables for: " + rspQlQueryBody.getQueryBody()); + print("Input variables at this point: " + inputVariables); + // check if the result part of the RSP-QL query body does not contain any + // invalid unbound variables + // -> invalid means that they do not occur in the WHERE clause, and also not + // in the set of input variables that are about to be replaced + problematicVariables = expectedUnboundVariablesInResultPart + .stream() + .filter(s -> !unboundVariablesInWherePart.contains(s) + && !inputVariables.contains(s)) + .map(s -> { + String m1 = outputMapping.getOrDefault(s, s); + return finalQueryMapping.getOrDefault(m1, m1); + }) + .collect(Collectors.toList()); + if (!problematicVariables.isEmpty()) { + throw new InvalidDivideQueryParserInputException(String.format( + "Resulting RSP-QL query body will contain invalid variables in result part, " + + "that are not 
present in WHERE clause and will also not be replaced " + + "during the DIVIDE query derivation: %s. Make sure the input is correct. " + + "If the input contains a final query, make sure to define a mapping of a " + + "variable in the stream query to each of these variable in the final query " + + "(or allow automatic mapping of matching variable names via the settings). " + + "If the input only contains a stream query, make sure the WHERE clause " + + "of the stream query contains these variables.", + String.join(", ", problematicVariables))); + } + + print("======================================"); + } + + private Pair, Set> findUnboundVariablesInQueryResultPart(String result, + QueryForm queryForm) + throws InvalidDivideQueryParserInputException { + print("-> FINDING UNBOUND VARIABLES IN QUERY RESULT PART"); + if (queryForm == QueryForm.SELECT) { + Set expectedVariables = new HashSet<>(); + Set forbiddenVariables = new HashSet<>(); + String formattedSelectClause = String.format("%s ", result.trim()); + if (SELECT_CLAUSE_PATTERN.matcher(formattedSelectClause).matches()) { + Matcher m = SELECT_CLAUSE_PATTERN_ENTRY.matcher(formattedSelectClause); + while(m.find()) { + String match = m.group().trim(); + Matcher m2 = SELECT_CLAUSE_EXPRESSION_PATTERN.matcher(match); + if (m2.matches()) { + print(" Expression pattern match: '" + match + "'"); + // if it matches the expression "... 
AS ?...", then only the first part + // should be returned as a variable, IF it is a variable of course + m2.reset(); + while (m2.find()) { + if (VAR1_PATTERN.matcher(m2.group(1)).matches()) { + expectedVariables.add(m2.group(1)); + print(" Varname in expression: '" + m2.group(1) + "'"); + } else { + print(" NO varname in expression: '" + m2.group(1) + "'"); + } + forbiddenVariables.add(m2.group(2)); + print(" Forbidden variable: " + m2.group(2)); + } + } else { + print(" Varname match: '" + match + "'"); + // if no match with expression, then this match is a single variable name + expectedVariables.add(match); + } + } + return Pair.create(expectedVariables, forbiddenVariables); + + } else { + throw new InvalidDivideQueryParserInputException( + "SELECT clause of resulting RSP-QL query is invalid, which is probably " + + "caused by an invalid SELECT clause in the stream or final query."); + } + } else { + return Pair.create( + new HashSet<>(findUnboundVariables(result)), + new HashSet<>()); + } + } + + /** + * @param query SPARQL query body string + * @return parsed version of the given SPARQL query + * @throws InvalidDivideQueryParserInputException if the query is of invalid syntax + */ + @Override + public ParsedSparqlQuery parseSparqlQuery(String query) + throws InvalidDivideQueryParserInputException { + // first split SPARQL query into its different parts + SplitSparqlQuery splitSparqlQuery = splitSparqlQuery(query); + + // retrieve the prefixes used in this SPARQL query + Set prefixes = getPrefixes(splitSparqlQuery.getPrefixPart()); + + // check for conflicting prefixes + Map prefixMap = new HashMap<>(); + for (Prefix prefix : prefixes) { + if (prefixMap.containsKey(prefix.getName()) && + !prefixMap.get(prefix.getName()).equals(prefix.getUri())) { + throw new InvalidDivideQueryParserInputException( + String.format("Multiple prefixes are present with name '%s'", + prefix.getName())); + } + prefixMap.put(prefix.getName(), prefix.getUri()); + } + + // check for 
prefix names occurring in query string without being defined as a prefix + String queryWithoutPrefixes = query.replace(splitSparqlQuery.getPrefixPart(), ""); + Set existingPrefixNames = prefixMap.keySet(); + Matcher m = USED_PREFIX_PATTERN.matcher(queryWithoutPrefixes); + while (m.find()) { + if (!existingPrefixNames.contains(m.group(2)) && !"_:".equals(m.group(2))) { + throw new InvalidDivideQueryParserInputException( + String.format("Query string contains undefined prefix '%s'", m.group(2))); + } + } + + // remove prefixes that do not occur in query body + prefixes.removeIf(prefix -> !Pattern.compile("(\\s|\\(|^|\\^)" + prefix.getName()) + .matcher(queryWithoutPrefixes).find()); + + return new ParsedSparqlQuery( + splitSparqlQuery, + prefixes); + } + + /** + * @param query RSP-QL query body string + * @return parsed version of the given RSP-QL query + * @throws InvalidDivideQueryParserInputException if the query is of invalid syntax + */ + private ParsedSparqlQuery parseRspQlQuery(String query) + throws InvalidDivideQueryParserInputException { + // first split RSP-QL query as a SPARQL query into its different parts + SplitSparqlQuery splitSparqlQuery = splitSparqlQuery(query); + + // retrieve the prefixes used in this SPARQL query + Set prefixes = getPrefixes(splitSparqlQuery.getPrefixPart()); + + // check for conflicting prefixes + Map prefixMap = new HashMap<>(); + for (Prefix prefix : prefixes) { + if (prefixMap.containsKey(prefix.getName()) && + !prefixMap.get(prefix.getName()).equals(prefix.getUri())) { + throw new InvalidDivideQueryParserInputException( + String.format("Multiple prefixes are present with name '%s'", + prefix.getName())); + } + prefixMap.put(prefix.getName(), prefix.getUri()); + } + + // check for prefix names occurring in query string without being defined as a prefix + String queryWithoutPrefixes = query.replace(splitSparqlQuery.getPrefixPart(), ""); + Set existingPrefixNames = new HashSet<>(prefixMap.keySet()); + Matcher m = 
USED_PREFIX_PATTERN.matcher(queryWithoutPrefixes); + while (m.find()) { + if (!existingPrefixNames.contains(m.group(2))) { + if (":".equals(m.group(2))) { + String prefixUri = ""; + splitSparqlQuery = new SplitSparqlQuery( + splitSparqlQuery.getPrefixPart() + " PREFIX : " + prefixUri, + splitSparqlQuery.getQueryForm(), + splitSparqlQuery.getResultPart(), + splitSparqlQuery.getFromPart(), + splitSparqlQuery.getWherePart(), + splitSparqlQuery.getFinalPart()); + existingPrefixNames.add(":"); + prefixes.add(new Prefix(":", prefixUri)); + } else { + throw new InvalidDivideQueryParserInputException( + String.format("Query string contains undefined prefix '%s'", m.group(2))); + } + } + } + + // remove prefixes that do not occur in query body + prefixes.removeIf(prefix -> !Pattern.compile("(\\s|\\(|^|\\^)" + prefix.getName()) + .matcher(queryWithoutPrefixes).find()); + + return new ParsedSparqlQuery( + splitSparqlQuery, + prefixes); + } + + /** + * @param query SPARQL query body string + * @return split SPARQL query containing the different parts + * @throws InvalidDivideQueryParserInputException if the query is of invalid syntax + */ + private SplitSparqlQuery splitSparqlQuery(String query) + throws InvalidDivideQueryParserInputException { + // try to match the query pattern on the SPARQL query + Matcher m = SPARQL_QUERY_SPLIT_PATTERN.matcher(query); + if (m.find()) { + // parse query form + QueryForm queryForm = QueryForm.fromString(m.group(5).trim()); + if (queryForm == null) { + throw new InvalidDivideQueryParserInputException( + "Invalid query form specified in query"); + } + + // parse result part (output) & remove curly braces + String resultPart = m.group(6).trim(); + resultPart = parseQueryResultPart(resultPart, queryForm); + + // create split query & make sure all strings are trimmed + return new SplitSparqlQuery( + m.group(1) == null ? null : m.group(1).trim(), + queryForm, + resultPart, + m.group(8) == null ? null : m.group(8).trim(), + m.group(11) == null ? 
null : m.group(11).trim(), + m.group(12) == null ? null : m.group(12).trim()); + + } else { + throw new InvalidDivideQueryParserInputException( + "Query does not have valid SPARQL format"); + } + } + + /** + * Parses query result part (output) & removes curly braces if present. + * + * @param resultPart result part string + * @param queryForm form of query + * @return parsed result part string + * @throws InvalidDivideQueryParserInputException if result part is invalid + */ + private String parseQueryResultPart(String resultPart, QueryForm queryForm) + throws InvalidDivideQueryParserInputException { + if (resultPart.startsWith("{")) { + if (resultPart.endsWith("}")) { + resultPart = resultPart.substring(1, resultPart.length() - 1).trim(); + } else { + throw new InvalidDivideQueryParserInputException( + String.format("Format of %s clause is invalid", queryForm.toString())); + } + } else { + if (resultPart.endsWith("}")) { + throw new InvalidDivideQueryParserInputException( + String.format("Format of %s clause is invalid", queryForm.toString())); + } + } + return resultPart; + } + + /** + * @param prefixString string of prefixes as defined in a SPARQL or RSP-QL query + * @return parsed set of prefixes + */ + private Set getPrefixes(String prefixString) { + Matcher m = PREFIX_PATTERN.matcher(prefixString); + Set prefixes = new HashSet<>(); + while (m.find()) { + String prefixName = m.group(2).trim(); + String prefixURI = m.group(3).trim(); + prefixes.add(new Prefix(prefixName, prefixURI)); + } + return prefixes; + } + + /** + * @param fromPart string with FROM clauses as defined in SPARQL query + * @param prefixes set of prefixes to be used for resolving the graph names + * occurring in the FROM clauses of the SPARQL query + * @return pair with as left value the strings of actual graph names appearing + * in the FROM clauses, as right value the remainder of the FROM part + * with all matching named graphs removed + * @throws InvalidDivideQueryParserInputException if 
any of the graph names + * occurring in the FROM clause + * is invalid + */ + private Pair, String> retrieveGraphNamesFromSparqlFromPart(String fromPart, + Set prefixes) + throws InvalidDivideQueryParserInputException { + String fromPartLeftover = fromPart; + Matcher matcher = SPARQL_FROM_NAMED_GRAPH_PATTERN.matcher(fromPart); + List graphNames = new ArrayList<>(); + while (matcher.find()) { + graphNames.add(resolveGraphName(matcher.group(1), prefixes)); + fromPartLeftover = fromPartLeftover.replace(matcher.group().trim(), "").trim(); + } + return Pair.create(graphNames, fromPartLeftover); + } + + /** + * @param fromPart string with FROM clauses as defined in RSP-QL query + * @param prefixes set of prefixes to be used for resolving the graph names + * occurring in the FROM clauses of the SPARQL query + * @return pair with as left value the strings of actual graph names appearing + * in the FROM clauses, as right value the remainder of the FROM part + * with all matching named graphs removed + * @throws InvalidDivideQueryParserInputException if any of the graph names + * occurring in the FROM clause + * is invalid + */ + private Pair, String> retrieveGraphNamesFromRspQlFromPart(String fromPart, + Set prefixes) + throws InvalidDivideQueryParserInputException { + String fromPartLeftover = fromPart; + Matcher matcher = RSP_QL_FROM_NAMED_GRAPH_PATTERN.matcher(fromPart); + List graphNames = new ArrayList<>(); + while (matcher.find()) { + graphNames.add(resolveGraphName(matcher.group(1), prefixes)); + fromPartLeftover = fromPartLeftover.replace(matcher.group().trim(), "").trim(); + } + return Pair.create(graphNames, fromPartLeftover); + } + + /** + * @param streamWindows list of possibly incomplete stream windows, which might not contain + * the stream window definition + * @param fromPart string with FROM clauses as defined in RSP-QL query + * @return stream windows completed according to how they are appearing in these FROM clauses, + * associated in a map to a key 
representing the window name in the query + * @throws InvalidDivideQueryParserInputException when a window name is defined more + * than once, or if any of the graph names + * occurring in the FROM clause is invalid + */ + private Map completeStreamWindowsFromRspQlFromPart(List streamWindows, + String fromPart, + Set prefixes) + throws InvalidDivideQueryParserInputException { + String fromPartLeftover = fromPart; + Matcher matcher = RSP_QL_FROM_NAMED_WINDOW_PATTERN.matcher(fromPart); + Map streamWindowMap = new HashMap<>(); + while (matcher.find()) { + String windowName = resolveGraphName(matcher.group(1), prefixes); + if (streamWindowMap.containsKey(windowName)) { + throw new InvalidDivideQueryParserInputException( + String.format("Window name '%s' defined more than once", windowName)); + } + String streamName = resolveGraphName(matcher.group(2), prefixes); + Matcher m2 = RSP_QL_WINDOW_PARAMETERS_PATTERN.matcher(matcher.group(3)); + if (m2.find()) { + streamWindowMap.put(windowName, new StreamWindow(streamName, + String.format("%s %s", m2.group(1), m2.group(7)))); + } else { + throw new InvalidDivideQueryParserInputException( + String.format("Window definition of stream '%s' is no " + + "valid RSP-QL", streamName)); + } + fromPartLeftover = fromPartLeftover.replace(matcher.group().trim(), "").trim(); + } + if (!fromPartLeftover.trim().isEmpty()) { + throw new InvalidDivideQueryParserInputException( + String.format("RSP-QL query contains invalid part '%s'", fromPartLeftover)); + } + + // check if every stream window defined in the JSON config also occurs + // in the RSP-QL from part, and append the default window parameters + Collection rspQlStreamWindows = streamWindowMap.values(); + for (StreamWindow definedStreamWindow : streamWindows) { + Optional matchingStreamWindow = rspQlStreamWindows.stream() + .filter(sw -> sw.getStreamIri().equals(definedStreamWindow.getStreamIri())) + .findFirst(); + if (matchingStreamWindow.isPresent()) { + if 
(definedStreamWindow.getWindowDefinition() != null && + !matchingStreamWindow.get().getWindowDefinition().equals( + definedStreamWindow.getWindowDefinition())) { + throw new InvalidDivideQueryParserInputException(String.format( + "Configuration contains stream window with IRI '%s' that has a different window " + + "definition than the corresponding stream window present in the " + + "RSP-QL stream query", definedStreamWindow.getStreamIri())); + } + matchingStreamWindow.get().setDefaultWindowParameterValues( + definedStreamWindow.getDefaultWindowParameterValues()); + } else { + throw new InvalidDivideQueryParserInputException(String.format( + "Configuration contains stream window with IRI '%s' that does not occur " + + "in the RSP-QL stream query", definedStreamWindow.getStreamIri())); + } + } + + return streamWindowMap; + } + + /** + * @param whereClause extracted WHERE clause of a SPARQL or RSP-QL query + * @param prefixes set of prefixes used in this query + * @param inputGraphNames graph names specified in the FROM clauses of this query + * @param queryLanguage language used for specifying the WHERE clause of this query + * (this can either be SPARQL or RSP-QL) + * @return parsed WHERE clause of the query, containing a list of WHERE clause + * items which can either be graphs or expressions + * @throws InvalidDivideQueryParserInputException if the WHERE clause contains invalid + * graph names + */ + private WhereClause parseWhereClauseOfQuery(String whereClause, + Set prefixes, + List inputGraphNames, + InputQueryLanguage queryLanguage) + throws InvalidDivideQueryParserInputException { + List items = new ArrayList<>(); + + // pattern to be used for parsing depends on query language + Pattern pattern; + if (queryLanguage == InputQueryLanguage.SPARQL) { + pattern = SPARQL_WHERE_CLAUSE_GRAPH_PATTERN; + } else { // RSP_QL query language + pattern = RSP_QL_WHERE_CLAUSE_GRAPH_OR_WINDOW_PATTERN; + } + + // make sure WHERE clauses is trimmed before parsing + whereClause 
= whereClause.trim(); + + // try to find graph patterns in WHERE clauses + Matcher matcher = pattern.matcher(whereClause); + int lastEndIndex = 0; + + // loop over all found graph patterns + while (matcher.find()) { + int startIndex = matcher.start(); + int endIndex = matcher.end(); + + // update indices & create expression item if some text is found in between + // last match and this match + if (startIndex != lastEndIndex) { + String expression = whereClause.substring(lastEndIndex, startIndex).trim(); + items.add(new WhereClauseExpressionItem(expression)); + } + + // find end of graph pattern + int braceCount = 1; + int loopIndex = endIndex + 1; + while (braceCount > 0) { + char c = whereClause.charAt(loopIndex); + if (c == '{') { + braceCount++; + } else if (c == '}') { + braceCount--; + } + loopIndex++; + } + lastEndIndex = loopIndex; + + // parse name of found graph + String name = matcher.group(2).trim(); + name = resolveGraphName(name, prefixes); + if (!inputGraphNames.contains(name)) { + throw new InvalidDivideQueryParserInputException( + String.format("Graph name '%s' not specified with FROM", name)); + } + + // parse clause of found graph + String clause = whereClause.substring(endIndex + 1, loopIndex - 1).trim(); + + // create graph item for the found graph with parsed name & clause + items.add(new WhereClauseGraphItem(new Graph(name, clause))); + } + + // process possible expression after last found graph + // (if no graph is found, this expression will contain the full WHERE clause) + if (lastEndIndex != whereClause.length()) { + String lastExpression = whereClause.substring(lastEndIndex).trim(); + items.add(new WhereClauseExpressionItem(lastExpression)); + } + + return new WhereClause(items); + } + + /** + * Resolves a graph name against a set of prefixes. + * If the graph name is not an IRI (<...>), then it should start with a + * prefix in the specified list. 
+ * + * @param graphName graph name to be resolved + * @param prefixes set of prefixes to be used for resolving the graph name + * @return resolved graph name (can be the same as the input if it was + * already a valid IRI) + * @throws InvalidDivideQueryParserInputException if the graph name is invalid + * (invalid syntax or non-existing prefix) + */ + private String resolveGraphName(String graphName, Set prefixes) + throws InvalidDivideQueryParserInputException { + // parse name of found graph + if (graphName.startsWith("<")) { + if (!graphName.endsWith(">")) { + throw new InvalidDivideQueryParserInputException( + String.format("Invalid graph name '%s'", graphName)); + } + } else { + boolean matched = false; + for (Prefix prefix : prefixes) { + if (graphName.startsWith(prefix.getName())) { + matched = true; + String afterPrefix = graphName.replaceFirst(Pattern.quote(prefix.getName()), ""); + graphName = String.format("%s%s>", + prefix.getUri().substring(0, prefix.getUri().length() - 1), + afterPrefix); + break; + } + } + if (!matched) { + throw new InvalidDivideQueryParserInputException( + String.format("Invalid graph name '%s' (no valid IRI" + + " & no existing prefix used)", graphName)); + } + } + return graphName; + } + + /** + * Processes the WHERE clause items in the parsed WHERE clause of a SPARQL + * or RSP-QL query. + * This processing is an additional parsing step: the WHERE clause items are split + * based on whether they depend on the context, or depend on one or more streams + * specified as stream graph IRIs in the input of the parser. The context expressions + * are appended and returned as a string in the processed result, whereas the stream + * expressions are still returned as an ordered list of items (i.e., either graph + * patterns or expressions with a SPARQL keyword pattern outside a graph pattern). + * While processing, the items are verified and an exception is thrown if anything + * is not valid. 
+ * + * @param whereClause parsed WHERE clause of SPARQL OR RSP-QL query + * @param streamGraphNames names (IRIs) of stream graphs as specified in the parser input + * @return the processed WHERE clause of the stream query + * @throws InvalidDivideQueryParserInputException if an expression is present outside a graph without + * an associated SPARQL keyword (these should be + * placed inside a graph pattern), OR when an expression + * is present inside a graph that is not reading from + * a stream, but that contains a SPARQL pattern (because + * then it is part of the context, and this context cannot + * contain any SPARQL patterns, so this pattern should + * be placed outside the graph then) + */ + private ParsedStreamQueryWhereClause parseStreamQueryWhereClauseOfQuery( + WhereClause whereClause, List streamGraphNames) + throws InvalidDivideQueryParserInputException { + // prepare results of parsing + StringBuilder contextPart = new StringBuilder(); + List streamItems = new ArrayList<>(); + + for (WhereClauseItem item : whereClause.getItems()) { + if (item.getItemType() == WhereClauseItemType.EXPRESSION) { + WhereClauseExpressionItem expressionItem = (WhereClauseExpressionItem) item; + + // expression items are verified and split based on SPARQL keywords, + // and are included into the items that depend on the input stream(s) + streamItems.addAll( + verifyAndSplitStreamQueryWhereClauseExpressionItemsBySparqlKeywords(expressionItem)); + + } else if (item.getItemType() == WhereClauseItemType.GRAPH) { + WhereClauseGraphItem graphItem = (WhereClauseGraphItem) item; + + // graph patterns are handled differently based on the specified name + // of the graph in the pattern + if (streamGraphNames.contains(graphItem.getGraph().getName())) { + // if the graph name is specified as a stream graph in the parser input, + // the whole pattern is included into the set of items that depend on + // the input stream(s) + streamItems.add(graphItem); + } else { + // if the graph name 
is NOT specified as a stream graph, then it should + // first be checked that its expressions doe not contain any SPARQL + // keyword (because these will become the context part which will be + // added as the consequence of the sensor query rule, so it cannot contain + // any SPARQL keywords) + String graphItemLowerCaseClause = + graphItem.getGraph().getClause().toLowerCase(Locale.ROOT); + boolean containsSparqlKeyword = POSSIBLE_WHERE_CLAUSE_SPARQL_KEYWORDS + .stream() + .anyMatch(graphItemLowerCaseClause::contains); + if (containsSparqlKeyword) { + throw new InvalidDivideQueryParserInputException( + "Non-streaming graph patterns of stream query cannot contain " + + "special SPARQL keywords - such expressions should " + + "be placed outside the graph"); + } + + // if no SPARQL keyword is present, the expressions in the graph pattern + // can be safely added to the context part of the stream query WHERE clause + contextPart.append(graphItem.getGraph().getClause()).append(" "); + } + } + } + + return new ParsedStreamQueryWhereClause( + contextPart.toString().trim(), streamItems); + } + + /** + * Verifies and splits an individual expression item of the parsed WHERE clause of + * the stream query. The splitting will split the individual items into a single part + * per SPARQL keyword pattern. The verification is a check whether no expressions + * occur in this expression item (i.e., outside a graph) without an associated + * SPARQL keyword. 
+ * + * @param expressionItem individual expression item of the parsed WHERE clause of + * the stream query + * @return a list of expression items originating from the original expression item, + * but split based on SPARQL keyword patterns + * @throws InvalidDivideQueryParserInputException if an expression occurs in this expression + * item (i.e., outside a graph) without an + * associated SPARQL keyword (these should be + * placed inside a graph pattern) + */ + private List verifyAndSplitStreamQueryWhereClauseExpressionItemsBySparqlKeywords( + WhereClauseExpressionItem expressionItem) throws InvalidDivideQueryParserInputException { + List resultItems = new ArrayList<>(); + String expressionLeftover = expressionItem.getExpression(); + + // scan the expression for special SPARQL patterns, i.e., parts that start with + // SPARQL keyword followed by any character but a keyword + // (so if multiple keywords occur, there will be multiple matches) + Matcher expressionMatcher = SPECIAL_SPARQL_PATTERN.matcher( + expressionItem.getExpression()); + while (expressionMatcher.find()) { + String match = expressionMatcher.group(); + + // if the match involves a FILTER (NOT) EXISTS pattern, then the braces should + // be scanned to find the end of the pattern + // (instead of considering the end as the end of the pattern match) + if (match.matches("^FILTER\\s+(NOT\\s+)?EXISTS\\s+\\{.*")) { + // find end of FILTER (NOT) EXISTS pattern + int braceCount = 1; + int loopIndex = match.indexOf("{") + 1; + while (braceCount > 0) { + char c = expressionLeftover.charAt(loopIndex); + if (c == '{') { + braceCount++; + } else if (c == '}') { + braceCount--; + } + loopIndex++; + } + + // update match to reach from start to this end + match = expressionLeftover.substring(0, loopIndex).trim(); + } + + // every match will be added as a separate WHERE clause expression item in the list + resultItems.add(new WhereClauseExpressionItem(match)); + // this match is removed once from the original 
expression + expressionLeftover = expressionLeftover.replaceFirst( + Pattern.quote(match), "").trim(); + // since the match can be made larger than the actual pattern match + // (for FILTER (NOT) EXISTS patterns), a new matcher is created + expressionMatcher = SPECIAL_SPARQL_PATTERN.matcher(expressionLeftover); + } + + // if the original expression still contains text, this means that this part does + // not start with a known SPARQL keyword => in that case, this expression should + // be added to one of the graphs of the stream query WHERE clause + if (!expressionLeftover.isEmpty()) { + throw new InvalidDivideQueryParserInputException( + String.format("SPARQL pattern without known keyword found " + + "outside graph in stream query WHERE clause: %s", expressionLeftover)); + } + return resultItems; + } + + /** + * Retrieves the input variables to be specified in a DIVIDE sensor query rule. + * For this, it checks which variables occur in both the antecedent of the rule + * (i.e., the context-dependent part of the stream query) and the RSP-QL query + * body. These variables will be substituted into the RSP-QL query body after + * the DIVIDE query derivation. 
+ * + * @param contextPartOfSensorQueryRule context part of sensor query rule, i.e., + * its antecedent + * @param rspQlQueryBodyVariables unbound variables in RSP-QL query body that is + * referenced in the sensor query rule via the + * query pattern + * @return the unbound variables that occur both in the antecedent and consequence + * of the sensor query rule, and that should therefore be specified as + * input variables + */ + private List retrieveInputVariables(String contextPartOfSensorQueryRule, + Set rspQlQueryBodyVariables) { + List antecedentVariables = + findUnboundVariables(contextPartOfSensorQueryRule); + + return antecedentVariables + .stream() + .filter(rspQlQueryBodyVariables::contains) + .collect(Collectors.toList()); + } + + /** + * Retrieves the output variables to be specified in a DIVIDE sensor query rule. + * For this, it checks which variables occur in the output of the stream query + * (this ends up in the consequence of the sensor query rule) that do NOT occur + * in the context part of the sensor query rule; these variables are the output + * variables that should be replaced by a blank node in the sensor query rule. 
+ * + * @param contextPartOfSensorQueryRule context part of sensor query rule, i.e., + * its antecedent + * @param streamQueryResult output of the stream query + * @return the unbound variables that occur in the output of the stream query, + * and not in the context part of the sensor query rule + */ + private List retrieveOutputVariables(String contextPartOfSensorQueryRule, + String streamQueryResult) { + List antecedentVariables = + findUnboundVariables(contextPartOfSensorQueryRule); + List resultVariables = findUnboundVariables(streamQueryResult); + + return resultVariables + .stream() + .filter(s -> !antecedentVariables.contains(s)) + .collect(Collectors.toList()); + } + + /** + * @param queryPart any part of a SPARQL or RSP-QL query (can also be the full query body) + * @return a list of unbound variables that are present in the given query part + */ + List findUnboundVariables(String queryPart) { + Matcher matcher = UNBOUND_VARIABLES_PATTERN.matcher(queryPart); + Set unboundVariables = new LinkedHashSet<>(); + while (matcher.find()) { + unboundVariables.add(matcher.group()); + } + return new ArrayList<>(unboundVariables); + } + + private List findUnboundVariablesInWindowParameters(StreamWindow streamWindow) { + Set unboundVariables = new LinkedHashSet<>(); + String definition = streamWindow.getWindowDefinition(); + Matcher matcher = UNBOUND_VARIABLES_IN_STREAM_WINDOW_PATTERN.matcher(definition); + while (matcher.find()) { + unboundVariables.add("?" + matcher.group(1)); + } + return new ArrayList<>(unboundVariables); + } + + /** + * Extends the output of the streaming query to be used in the consequence of + * the sensor query rule. For this, it starts from the original stream query + * output, and enriches it with all items in the stream-dependent parts of the + * WHERE clause of the stream query. 
+ * + * @param parsedStreamQueryWhereClauseStreamItems stream items occurring in parsed + * WHERE clause of stream query + * @param streamQueryOutput defined output of stream query + * @param prefixes prefixes to be used for the sensor query rule + * @return extended output to be used in consequence of sensor query rule + */ + private String extendOutputOfStreamQueryForSensorQueryRule( + List parsedStreamQueryWhereClauseStreamItems, + String streamQueryOutput, + Set prefixes) throws InvalidDivideQueryParserInputException { + Map variableMapping = new HashMap<>(); + + // create prefix string for queries + String queryPrefixString = prefixes + .stream() + .map(prefix -> String.format("PREFIX %s %s", + prefix.getName(), prefix.getUri())) + .collect(Collectors.joining(" ")); + + // create prefix string in Turtle format + String turtlePrefixString = divideQueryGenerator.getTurtlePrefixList(prefixes); + + // search all unbound variables in stream query output + // -> sort to avoid any replacement issues later + // -> create random unique mapping of variable to URI + List outputVariables = findUnboundVariables(streamQueryOutput); + outputVariables = outputVariables + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 
1 : s1.compareTo(s2))) + .collect(Collectors.toList()); + for (String outputVariable : outputVariables) { + variableMapping.put(outputVariable, createUnboundVariableMapping()); + } + + // create Jena model of sensor query rule output, combining both the + // stream query output and the streaming part in its WHERE clause + Model model = ModelFactory.createDefaultModel(); + + // transform output to Turtle string with prefixes + // -> map every unbound variable to its mapping + // -> add statements to resulting Jena model + String transformedOutput = turtlePrefixString + "\n" + streamQueryOutput; + for (String outputVariable : outputVariables) { + transformedOutput = transformedOutput.replace( + outputVariable, variableMapping.get(outputVariable)); + } + Model transformedOutputModel = JenaUtilities.parseString(transformedOutput, RDFLanguage.TURTLE); + if (transformedOutputModel == null) { + throw new InvalidDivideQueryParserInputException( + "Parser will generate invalid output of sensor query rule. " + + "This is caused by an invalid stream query."); + } + model.add(transformedOutputModel); + + // loop over all streaming items in the WHERE clause of the stream query + for (WhereClauseItem item : parsedStreamQueryWhereClauseStreamItems) { + // retrieve content of graph + String itemContent = ""; + if (item.getItemType() == WhereClauseItemType.EXPRESSION) { + itemContent = ((WhereClauseExpressionItem) item).getExpression(); + } else if (item.getItemType() == WhereClauseItemType.GRAPH) { + itemContent = ((WhereClauseGraphItem) item).getGraph().getClause(); + } + + // find unbound variables in this part & sort to avoid replacement issues + List itemUnboundVariables = findUnboundVariables(itemContent); + itemUnboundVariables = itemUnboundVariables + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 
1 : s1.compareTo(s2))) + .collect(Collectors.toList()); + + // map all variables to existing or new mapping and replace + // these variables by their mappings + for (String itemUnboundVariable : itemUnboundVariables) { + String mapping; + if (variableMapping.containsKey(itemUnboundVariable)) { + mapping = variableMapping.get(itemUnboundVariable); + } else { + mapping = createUnboundVariableMapping(); + variableMapping.put(itemUnboundVariable, mapping); + } + itemContent = itemContent.replace(itemUnboundVariable, mapping); + } + + // create Jena SPARQL query with this pattern + // (required to filter out SPARQL patterns such as FILTER; if not filtered + // out, some WHERE clause patters will yield conversion issues since these + // SPARQL patterns are no valid Turtle + String query = String.format("%s SELECT * WHERE { %s }", queryPrefixString, itemContent); + Query q; + try { + q = QueryFactory.create(query); + } catch (Exception e) { + throw new InvalidDivideQueryParserInputException( + String.format("Error in input which causes the following " + + "invalid parsed SPARQL clause: %s", itemContent)); + } + + // only filter actual triple blocks in WHERE clause content + final String[] processedItemContent = {""}; + ElementWalker.walk(q.getQueryPattern(), + new ElementVisitorBase() { + public void visit(ElementPathBlock el) { + processedItemContent[0] += el.toString(); + } + } + ); + + // only proceed with this stream item if processed content is not empty + // (if empty, this means it only consisted of SPARQL patterns) + if (!processedItemContent[0].isEmpty()) { + // replace variables created by Jena for blank nodes when parsing + // the query WHERE clause -> these may be converted back to blank nodes, + // since the result will be used to parse as Turtle + processedItemContent[0] = processedItemContent[0].replace("??", "_:"); + + // parse Turtle string of processed content as triples and + // add them to resulting Jena model + String turtleString = 
String.format("%s\n%s .", + turtlePrefixString, + processedItemContent[0]); + Model parsed = JenaUtilities.parseString( + turtleString, + RDFLanguage.TURTLE); + model.add(parsed); + } + } + + // create model with triples that involve any of the output variables + /*Model result = ModelFactory.createDefaultModel(); + for (String outputVariable : outputVariables) { + String mapping = variableMapping.get(outputVariable); + mapping = mapping.substring(1, mapping.length() - 1); + + result.add(model.listStatements(new SimpleSelector( + new ResourceImpl(mapping), null, (Object) null))); + } + model = result;*/ + + // clear prefixes to ensure output string does not contain any prefix definitions + model.clearNsPrefixMap(); + + // sort key set of mappings to avoid replacement issues + List sortedUnboundVariables = variableMapping.keySet() + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 1 : s1.compareTo(s2))) + .collect(Collectors.toList()); + + // create Turtle string of constructed resulting Jena model + String extraOutput = JenaUtilities.serializeModel(model, RDFLanguage.TURTLE); + + // replace mappings of unbound variables back to the original unbound variable + for (String unboundVariable : sortedUnboundVariables) { + extraOutput = extraOutput.replace( + variableMapping.get(unboundVariable), unboundVariable); + } + + return extraOutput; + } + + private String createUnboundVariableMapping() { + return String.format( + "", + UUID.randomUUID()); + } + + private String generateRandomUnboundVariable() { + String base = "?" 
+ UUID.randomUUID(); + return base.replace("-", ""); + } + + private Set getUnboundVariablesInInput(DivideQueryParserInput input) { + Set unboundVariables = new HashSet<>( + findUnboundVariables(input.getStreamQuery())); + if (input.getFinalQuery() != null) { + unboundVariables.addAll(findUnboundVariables(input.getFinalQuery())); + } + if (input.getIntermediateQueries() != null) { + for (String intermediateQuery : input.getIntermediateQueries()) { + unboundVariables.addAll(findUnboundVariables(intermediateQuery)); + } + } + return unboundVariables; + } + + private CleanDivideQueryParserInput cleanInputFromOverlappingVariables( + MappedDivideQueryParserInput input) { + // create set of all existing unbound variables + Set unboundVariables = getUnboundVariablesInInput(input); + + // check if there is any unbound variable that is contained in another + // -> label the longer variable as problematic, since we will change that one + // (in this way we avoid replacement issues in this method as well) + List problematicVariables = unboundVariables + .stream() + .filter(s -> unboundVariables.stream().anyMatch( + s1 -> s.contains(s1) && !s.equals(s1))) + .collect(Collectors.toList()); + + // if no such variable exists, then the same input can be used further on + if (problematicVariables.isEmpty()) { + CleanDivideQueryParserInput result = + new CleanDivideQueryParserInput(input); + result.setUnboundVariables(unboundVariables); + result.setFinalQueryVariableMapping(input.getFinalQueryVariableMapping()); + return result; + } + + // otherwise, remove problematic variables from set of new variables + Set newVariables = unboundVariables + .stream() + .filter(s -> !problematicVariables.contains(s)) + .collect(Collectors.toSet()); + + // sort problematic variables accordingly to avoid replacement issues + List sortedProblematicVariables = problematicVariables + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 
1 : s1.compareTo(s2))) + .collect(Collectors.toList()); + + // create a mapping for each problematic variable to a new issue-less variable + Map variableMapping = new HashMap<>(); + for (String problematicVariable : sortedProblematicVariables) { + String newVariable = null; + boolean variableAccepted = false; + while (!variableAccepted) { + String triedNewVariable = generateRandomUnboundVariable(); + variableAccepted = newVariables.stream().noneMatch(triedNewVariable::contains); + if (variableAccepted) { + newVariable = triedNewVariable; + } + } + variableMapping.put(problematicVariable, newVariable); + } + + // do in-order replacements of all new variables in each query + String streamQuery = input.getStreamQuery(); + String finalQuery = input.getFinalQuery(); + List intermediateQueries = input.getIntermediateQueries(); + String solutionModifier = input.getSolutionModifier(); + for (String problematicVariable : sortedProblematicVariables) { + streamQuery = streamQuery.replaceAll( + Pattern.quote(problematicVariable), + variableMapping.get(problematicVariable)); + if (finalQuery != null) { + finalQuery = finalQuery.replaceAll( + Pattern.quote(problematicVariable), + variableMapping.get(problematicVariable)); + } + if (intermediateQueries != null) { + intermediateQueries = intermediateQueries + .stream() + .map(s -> s.replaceAll( + Pattern.quote(problematicVariable), + variableMapping.get(problematicVariable))) + .collect(Collectors.toList()); + } + if (solutionModifier != null) { + solutionModifier = solutionModifier.replaceAll( + Pattern.quote(problematicVariable), + variableMapping.get(problematicVariable)); + } + } + + CleanDivideQueryParserInput result = + new CleanDivideQueryParserInput( + input.getInputQueryLanguage(), + input.getStreamWindows(), + streamQuery, + intermediateQueries, + finalQuery, + solutionModifier, + variableMapping); + result.setUnboundVariables(getUnboundVariablesInInput(result)); + 
result.setFinalQueryVariableMapping(input.getFinalQueryVariableMapping()); + return result; + } + + private ParsedStreamWindow parseStreamWindow(StreamWindow streamWindow, + Map variableMapping) + throws InvalidDivideQueryParserInputException { + List windowParameterVariables = + findUnboundVariablesInWindowParameters(streamWindow); + + String streamIri = streamWindow.getStreamIri(); + String windowDefinition = streamWindow.getWindowDefinition(); + Map defaultWindowParameterValues = streamWindow.getDefaultWindowParameterValues(); + + Set finalWindowParameterVariables = new HashSet<>(); + for (String windowParameterVariable : windowParameterVariables) { + if (variableMapping.containsKey(windowParameterVariable)) { + windowDefinition = windowDefinition.replaceAll( + Pattern.quote(String.format("?{%s}", + windowParameterVariable.substring(1))), + String.format("?{%s}", variableMapping.get( + windowParameterVariable).substring(1))); + + if (defaultWindowParameterValues.containsKey(windowParameterVariable)) { + String value = defaultWindowParameterValues.get(windowParameterVariable); + defaultWindowParameterValues.remove(windowParameterVariable); + defaultWindowParameterValues.put(variableMapping.get(windowParameterVariable), value); + } + + finalWindowParameterVariables.add( + variableMapping.get(windowParameterVariable)); + + } else { + finalWindowParameterVariables.add(windowParameterVariable); + } + } + + for (String s : defaultWindowParameterValues.keySet()) { + if (!finalWindowParameterVariables.contains(s)) { + throw new InvalidDivideQueryParserInputException(String.format( + "Configuration of stream window with IRI '%s' contains default " + + "value for variable '%s' which does not occur in window definition", + streamIri, s)); + } + } + + return new ParsedStreamWindow( + streamIri, windowDefinition, defaultWindowParameterValues, finalWindowParameterVariables); + } + + private List parseSelectClause(String selectClause) { + List result = new ArrayList<>(); + 
String formattedSelectClause = String.format("%s ", selectClause.trim()); + if (SELECT_CLAUSE_PATTERN.matcher(formattedSelectClause).matches()) { + Matcher m = SELECT_CLAUSE_PATTERN_ENTRY.matcher(formattedSelectClause); + while (m.find()) { + result.add(m.group().trim()); + } + } + return result; + } + + private ConvertedStreamWindow convertParsedStreamWindow(ParsedStreamWindow parsedStreamWindow) + throws InvalidDivideQueryParserInputException { + String streamIri = parsedStreamWindow.getStreamIri(); + String windowDefinition = parsedStreamWindow.getWindowDefinition(); + Map defaults = parsedStreamWindow.getDefaultWindowParameterValues(); + List windowParameters = new ArrayList<>(); + + Matcher m = RSP_QL_WINDOW_PARAMETERS_PATTERN.matcher(windowDefinition); + if (!m.find()) { + throw new InvalidDivideQueryParserInputException("KLOPT NIET"); + } + + String range = m.group(2); + String fromTo = m.group(4); + String step = m.group(8); + if (range != null) { + String rangeParam = m.group(3); + WindowParameter rangeWp = createWindowParameter(rangeParam, defaults); + windowParameters.add(rangeWp); + windowDefinition = windowDefinition.replaceAll( + Pattern.quote(rangeParam), String.format("?{%s}", rangeWp.getVariable().substring(1))); + + } else if (fromTo != null) { + String fromParam = m.group(5); + WindowParameter fromWp = createWindowParameter(fromParam, defaults); + windowParameters.add(fromWp); + windowDefinition = windowDefinition.replaceAll( + Pattern.quote(fromParam), String.format("?{%s}", fromWp.getVariable().substring(1))); + + String toParam = m.group(6); + WindowParameter toWp = createWindowParameter(toParam, defaults); + windowParameters.add(toWp); + windowDefinition = windowDefinition.replaceAll( + Pattern.quote(toParam), String.format("?{%s}", toWp.getVariable().substring(1))); + + } else { + throw new InvalidDivideQueryParserInputException("CANNOT MATCH??"); + } + + if (step != null) { + String stepParam = m.group(9); + WindowParameter stepWp = 
createWindowParameter(stepParam, defaults); + windowParameters.add(stepWp); + windowDefinition = windowDefinition.replaceAll( + Pattern.quote(stepParam), String.format("?{%s}", stepWp.getVariable().substring(1))); + } + + return new ConvertedStreamWindow( + streamIri, + windowDefinition, + windowParameters); + } + + private WindowParameter createWindowParameter(String parameter, + Map defaultWindowParameterValues) + throws InvalidDivideQueryParserInputException { + Matcher m = STREAM_WINDOW_PARAMETER_VARIABLE_PATTERN.matcher(parameter); + if (m.find()) { + String fullDuration = m.group(2); + String number = m.group(13); + + String variableName; + WindowParameter.WindowParameterType type = null; + + if (fullDuration != null) { + variableName = extractNameFromWindowParameterVariable(fullDuration); + type = WindowParameter.WindowParameterType.XSD_DURATION; + + } else if (number != null) { + variableName = extractNameFromWindowParameterVariable(number); + String typeString = m.group(23); + if ("S".equals(typeString)) { + type = WindowParameter.WindowParameterType.TIME_SECONDS; + } else if ("M".equals(typeString)) { + type = WindowParameter.WindowParameterType.TIME_MINUTES; + } else if ("H".equals(typeString)) { + type = WindowParameter.WindowParameterType.TIME_HOURS; + } + + } else { + // impossible + throw new InvalidDivideQueryParserInputException("IMPOSSIBLE"); + } + + // extract whether the variable is specified as a default or should be replaced + // by the query derivation + String defaultValue = defaultWindowParameterValues.getOrDefault("?" + variableName, null); + if (defaultValue != null) { + return new WindowParameter( + "?" + variableName, + defaultValue, + type, + false); + } else { + return new WindowParameter( + "?" + variableName, + "?" 
+ variableName, + type, + true); + } + + } else { + Matcher m2 = STREAM_WINDOW_PARAMETER_NUMBER_PATTERN.matcher(parameter); + if (m2.find()) { + WindowParameter.WindowParameterType type = null; + String value = m2.group(2); + String typeString = m2.group(3); + if ("S".equals(typeString)) { + type = WindowParameter.WindowParameterType.TIME_SECONDS; + } else if ("M".equals(typeString)) { + type = WindowParameter.WindowParameterType.TIME_MINUTES; + } else if ("H".equals(typeString)) { + type = WindowParameter.WindowParameterType.TIME_HOURS; + } + return new WindowParameter( + generateRandomUnboundVariable(), + value, type, false); + } else { + throw new InvalidDivideQueryParserInputException("invalid entry"); + } + } + } + + private String extractNameFromWindowParameterVariable(String variable) { + return variable.substring(2, variable.length() - 1); + } + + @SuppressWarnings("SameParameterValue") + private void validateSparqlQuery(String query, String prefix) + throws InvalidDivideQueryParserInputException { + try { + print("VALIDATING " + prefix.toUpperCase() + " QUERY: " + query); + print("======================================="); + QueryFactory.create(query); + } catch (Exception e) { + throw new InvalidDivideQueryParserInputException( + String.format("%s query is invalid SPARQL", prefix)); + } + } + + private DivideQueryParserOutput restoreOriginalVariablesInOutput(DivideQueryParserOutput output, + Map variableMapping) { + // do replacement for all variables + String queryPattern = output.getQueryPattern(); + String sensorQueryRule = output.getSensorQueryRule(); + String goal = output.getGoal(); + for (String s : variableMapping.keySet()) { + // do a first substitution specifically for the window definition + queryPattern = queryPattern.replaceAll( + Pattern.quote(String.format("?{%s}", variableMapping.get(s).substring(1))), + String.format("?{%s}", s.substring(1))); + + queryPattern = queryPattern.replaceAll( + Pattern.quote(variableMapping.get(s)), s); + 
sensorQueryRule = sensorQueryRule.replaceAll( + Pattern.quote(variableMapping.get(s)), s); + goal = goal.replaceAll( + Pattern.quote(variableMapping.get(s)), s); + } + + print("DOING OUTPUT MAPPING REPLACEMENT: " + variableMapping); + print("======================================"); + + return new DivideQueryParserOutput( + queryPattern, sensorQueryRule, goal, output.getQueryForm()); + } + + @SuppressWarnings("unused") + private DivideQueryParserOutput correctForStreamToFinalQueryVariableMapping( + DivideQueryParserOutput result, Map finalQueryVariableMapping) { + // if no mapping exists of final query variables, then no further correction + // to the output for this mapping should be done + if (finalQueryVariableMapping == null || finalQueryVariableMapping.isEmpty()) { + return result; + } + + // a correction is only required for SELECT queries, since this output contains + // bindings to actual variable names + // -> these names should equal the original names that were used in the + // final query (for full transparency) + // (note that we definitely know the input contained a final query, otherwise + // the final query variable mapping would be always empty) + if (result.getQueryForm() == QueryForm.SELECT) { + print("DOING ADDITIONAL CORRECTION IN SELECT QUERY FOR " + + "STREAM TO FINAL QUERY VARIABLE MAPPING"); + + // extract query body from query pattern + String queryPattern = result.getQueryPattern(); + String queryBody = extractRspQlQueryBodyFromQueryPattern( + queryPattern, result.getQueryForm()); + + // only proceed if nothing went wrong with extracting the query body + // -> otherwise return original result (better to have that result than + // no result of course) + if ("".equals(queryBody) || queryBody == null) { + return result; + } + + // now replacements can be prepared + // -> first generate set of all possible conflicting variables + // (conflicting for replacement later on) + // -> these are the keys and values of the mapping + // -> and also 
any other unbound variables occurring in the query body + Set conflictingVariables = new HashSet<>(finalQueryVariableMapping.keySet()); + conflictingVariables.addAll(finalQueryVariableMapping.values()); + List rspQlQueryUnboundVariables = findUnboundVariables(queryBody); + conflictingVariables.addAll(rspQlQueryUnboundVariables); + + // split replacement list in two to first do some temporal replacements + // -> these replacements will be done first before doing the actual replacements + // -> this is to avoid that conflicts occur with similar variables + // -> this works if the resulting variables after replacement are unique, i.e., + // they do not occur as such in the list of variables or as a substring of any + // of these variables (or vice versa) + Map temporalReplacements = new HashMap<>(); + Map finalReplacements = new HashMap<>(); + for (Map.Entry requiredReplacement : finalQueryVariableMapping.entrySet()) { + // replacement is not required if: + // - key and value are identical + // - RSP-QL query body does not contain the key + if (requiredReplacement.getKey().equals(requiredReplacement.getValue()) || + !rspQlQueryUnboundVariables.contains(requiredReplacement.getKey())) { + continue; + } + + // first check if replacement is allowed + // -> this is not the case if the result of the replacement is already present + // as a variable in the RSP-QL query body + if (rspQlQueryUnboundVariables.contains(requiredReplacement.getValue())) { + print(String.format("Cannot do replacement of %s to %s since the result " + + "is already present as a variable in the RSP-QL query body", + requiredReplacement.getKey(), requiredReplacement.getValue())); + continue; + } + + String temporalVariable = ""; + boolean variableAccepted = false; + while (!variableAccepted) { + String triedNewVariable = generateRandomUnboundVariable(); + variableAccepted = conflictingVariables + .stream() + .noneMatch(s -> s.equals(triedNewVariable) || + s.contains(triedNewVariable) || + 
triedNewVariable.contains(s)); + if (variableAccepted) { + temporalVariable = triedNewVariable; + conflictingVariables.add(triedNewVariable); + } + } + + // split up replacements + temporalReplacements.put( + requiredReplacement.getKey(), temporalVariable); + finalReplacements.put( + temporalVariable, requiredReplacement.getValue()); + } + + print("Temporal replacements: " + temporalReplacements); + print("Final replacements: " + finalReplacements); + + // first do temporal replacements + List sortedTemporalReplacementKeys = temporalReplacements.keySet() + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 1 : s1.compareTo(s2))) + .collect(Collectors.toList()); + String newQueryBody = queryBody; + for (String key : sortedTemporalReplacementKeys) { + newQueryBody = newQueryBody.replaceAll( + Pattern.quote(key), temporalReplacements.get(key)); + } + + // then also do final replacements + List finalTemporalReplacementKeys = finalReplacements.keySet() + .stream() + .sorted((s1, s2) -> s1.contains(s2) ? + (s1.equals(s2) ? 0 : -1) : + (s2.contains(s1) ? 
1 : s1.compareTo(s2))) + .collect(Collectors.toList()); + for (String key : finalTemporalReplacementKeys) { + newQueryBody = newQueryBody.replaceAll( + Pattern.quote(key), finalReplacements.get(key)); + } + + print("Old query body: " + queryBody); + print("New query body: " + newQueryBody); + print("======================================"); + + queryPattern = queryPattern.replaceAll( + Pattern.quote(queryBody), newQueryBody); + return new DivideQueryParserOutput( + queryPattern, result.getSensorQueryRule(), + result.getGoal(), result.getQueryForm()); + + } else { // CONSTRUCT, DESCRIBE or ASK query + // -> no adaptations to result part are required anymore, since the result + // contains no bindings to actual variable names, to these names do not + // need to be identical to the names in the original final query + return result; + } + } + + private String extractRspQlQueryBodyFromQueryPattern(String queryPattern, + QueryForm queryForm) { + final Pattern pattern = Pattern.compile( + String.format(":%s \"\"\"(\\n|\\r|.)+\"\"\"\\.", + queryForm.toString().toLowerCase())); + Matcher m = pattern.matcher(queryPattern); + if (m.find()) { + return m.group(); + } else { + return ""; + } + } + + private void print(String text) { + if (DEBUG) { + System.out.println(text); + } + } + + + + // TESTING METHODS + + public static void main(String[] args) throws Exception { + DivideQueryParser parser = new DivideQueryParser(); + Map defaultWindowParameters = new HashMap<>(); + defaultWindowParameters.put("?seconds", "123M"); + String wd = "FROM NOW-?{seconds} TO NOW-PT?{otherSeconds}M STEP PT10S"; + StreamWindow sw = new StreamWindow("http://stream.test", wd, defaultWindowParameters); + ParsedStreamWindow w = parser.parseStreamWindow(sw, Collections.emptyMap()); + ConvertedStreamWindow converted = parser.convertParsedStreamWindow(w); + + System.out.println(sw); + System.out.println(w); + System.out.println(converted); + + /*Matcher m = RSP_QL_WINDOW_PARAMETERS_PATTERN.matcher(wd); + 
m.find(); + + String range = m.group(2); + String fromTo = m.group(4); + String step = m.group(8); + if (range != null) { + String rangeParam = m.group(3); + + } else if (fromTo != null) { + String fromParam = m.group(5); + String toParam = m.group(6); + + } else { + throw new InvalidDivideQueryParserInputException("CANNOT MATCH??"); + } + + if (step != null) { + String stepParam = m.group(9); + System.out.println(stepParam); + } + + System.out.println("1:" + m.group(1)); + System.out.println("2:" + m.group(2)); + System.out.println("3:" + m.group(3)); + System.out.println("4:" + m.group(4)); + System.out.println("5:" + m.group(5)); + System.out.println("6:" + m.group(6)); + System.out.println("7:" + m.group(7)); + System.out.println("8:" + m.group(8)); + System.out.println("9:" + m.group(9)); + System.out.println("10:" + m.group(10)); + System.out.println("11:" + m.group(11)); + System.out.println("12:" + m.group(12)); + System.out.println("13:" + m.group(13)); + System.out.println("14:" + m.group(14)); + System.out.println("15:" + m.group(15)); + System.out.println("16:" + m.group(16)); + System.out.println("17:" + m.group(17)); + System.out.println("18:" + m.group(18)); + System.out.println("19:" + m.group(19)); + System.out.println("20:" + m.group(20)); + System.out.println("21:" + m.group(21)); + System.out.println("22:" + m.group(22)); + System.out.println("23:" + m.group(23));*/ + + /*DivideQueryParserOutput divideQuery = singleSelectSparqlCase(); + + System.out.println("Resulting DIVIDE query:"); + System.out.println("\nGOAL:\n" + divideQuery.getGoal()); + System.out.println("\nPATTERN:\n" + divideQuery.getQueryPattern()); + System.out.println("\nSENSOR QUERY RULE:\n" + divideQuery.getSensorQueryRule());*/ + } + + @SuppressWarnings("unused") + private static void testStreamToFinalQueryVariableMapping() + throws InvalidDivideQueryParserInputException{ + DivideQueryParser parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + 
streamGraphs.add(new StreamWindow("", + "RANGE PT?{seconds}S STEP PT3S", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + + Map mapping = new HashMap<>(); + mapping.put("?a", "?b"); + mapping.put("?b", "?c"); + + String streamQuery = + "?a test:K ?b .\n" + + "?c test:L ?d ."; + String finalQuery = "?a test:K ?b . ?c test:L ?d . ?ba test:M ?cd ."; + + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + streamQuery, + intermediateQueries, + finalQuery, + "LIMIT 1", + mapping); + + parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput singleSelectSparqlCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + streamGraphs.add(new StreamWindow("", + "RANGE PT?{seconds}S STEP PT3S", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + Map mapping = new HashMap<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/dissect-case/divide-queries/" + + "3-no-activity-alarm-select/sparql-input/stream-query.query"), + intermediateQueries, + null, + "LIMIT 1", + mapping); + + return parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput singleSelectRspQlCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List intermediateQueries = new ArrayList<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.RSP_QL, + null, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/dissect-case/divide-queries/" + + "3-no-activity-alarm-select/rspql-input/stream-query.query"), + intermediateQueries, + null, + "LIMIT 1", + new 
HashMap<>()); + + return parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput sparqlDescribeCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + streamGraphs.add(new StreamWindow("", "RANGE PT5S STEP PT3S", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/dissect-case/divide-queries/" + + "1-above-threshold-alarm-describe/sparql-input/stream-query.query"), + intermediateQueries, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/dissect-case/divide-queries/" + + "1-above-threshold-alarm-describe/sparql-input/final-query.query"), + "ORDER BY DESC(?t) LIMIT 1", + new HashMap<>()); + + return parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput sparqlAskCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + streamGraphs.add(new StreamWindow("", "RANGE PT5S STEP PT3S", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/dissect-case/divide-queries/" + + "1-above-threshold-alarm-ask/sparql-input/stream-query.query"), + intermediateQueries, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/dissect-case/divide-queries/" + + "1-above-threshold-alarm-ask/sparql-input/final-query.query"), + "ORDER BY DESC(?t) LIMIT 1", + new HashMap<>()); + + return parser.parseDivideQuery(input); + } + + 
@SuppressWarnings("unused") + private static DivideQueryParserOutput singleSparqlQueryCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + streamGraphs.add(new StreamWindow("", + "FROM NOW-PT35M TO NOW-PT5M STEP PT5S", + Collections.emptyMap())); + streamGraphs.add(new StreamWindow("", + "RANGE PT5S TUMBLING", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_noAdditionalReasoning/case_1_concussion/input-query-converted.query"), + intermediateQueries, + null, + null, + new HashMap<>()); + + return parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput doubleSparqlQueryCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + streamGraphs.add(new StreamWindow("", + "RANGE PT5S STEP PT1S", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_additionalReasoning/case_1_concussion/input-query-converted.query"), + intermediateQueries, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_additionalReasoning/case_1_concussion/streamfox/reasoning-query-2.query"), + null, + new HashMap<>()); + + return parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput tripleSparqlQueryCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser 
parser = new DivideQueryParser(); + + List streamGraphs = new ArrayList<>(); + streamGraphs.add(new StreamWindow("", + "RANGE PT5S STEP PT1S", + Collections.emptyMap())); + List intermediateQueries = new ArrayList<>(); + intermediateQueries.add(IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_additionalReasoning/case_2_concussion/streamfox/reasoning-query-2.query")); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.SPARQL, + streamGraphs, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_additionalReasoning/case_2_concussion/input-query-converted.query"), + intermediateQueries, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_additionalReasoning/case_2_concussion/streamfox/reasoning-query-3.query"), + "GROUP BY ?v LIMIT 2", + new HashMap<>()); + + return parser.parseDivideQuery(input); + } + + @SuppressWarnings("unused") + private static DivideQueryParserOutput singleRspQlQueryCase() + throws InvalidDivideQueryParserInputException { + DivideQueryParser parser = new DivideQueryParser(); + + List intermediateQueries = new ArrayList<>(); + DivideQueryParserInput input = new DivideQueryParserInput( + InputQueryLanguage.RSP_QL, + null, + IOUtilities.readFileIntoString( + "/home/mathias/Github/divide/divide-protego/query_matching/" + + "case_noAdditionalReasoning/case_1_concussion/input-query-converted_RSPQL.query"), + intermediateQueries, + null, + null, + new HashMap<>()); + + return parser.parseDivideQuery(input); + } + + @Override + public String getTurtlePrefixList(Set prefixes) { + return divideQueryGenerator.getTurtlePrefixList(prefixes); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserFactory.java 
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserFactory.java new file mode 100644 index 0000000..d432a7b --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserFactory.java @@ -0,0 +1,32 @@ +package be.ugent.idlab.divide.core.query.parser; + +public class DivideQueryParserFactory { + + private static IDivideQueryParser instance; + + /** + * @param processUnmappedVariableMatchesInParser boolean representing whether variable + * matches in the input for the DIVIDE query + * parser that are not defined as mappings, + * should be considered as mappings by default + * @param validateUnboundVariablesInRspQlQueryBody boolean representing whether variables in + * the RSP-QL query body generated by this + * parser, should be validated (= checked for + * occurrence in the WHERE clause of the query + * or in the set of input variables that will + * be substituted during the DIVIDE query + * derivation) + * @return the DIVIDE query parser for this system runtime + */ + public static synchronized IDivideQueryParser getInstance( + boolean processUnmappedVariableMatchesInParser, + boolean validateUnboundVariablesInRspQlQueryBody) { + if (instance == null) { + instance = new DivideQueryParser( + processUnmappedVariableMatchesInParser, + validateUnboundVariablesInRspQlQueryBody); + } + return instance; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserInput.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserInput.java new file mode 100644 index 0000000..2254a2f --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserInput.java @@ -0,0 +1,186 @@ +package be.ugent.idlab.divide.core.query.parser; + +import be.ugent.idlab.util.io.IOUtilities; + +import 
java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +public class DivideQueryParserInput { + + private InputQueryLanguage inputQueryLanguage; + private final List streamWindows; + private String streamQuery; + private List intermediateQueries; + private String finalQuery; + private String solutionModifier; + private Map streamToFinalQueryVariableMapping; + + public DivideQueryParserInput(InputQueryLanguage inputQueryLanguage, + List streamWindows, + String streamQuery, + List intermediateQueries, + String finalQuery, + String solutionModifier, + Map streamToFinalQueryVariableMapping) { + this.inputQueryLanguage = inputQueryLanguage; + this.streamWindows = streamWindows; + this.streamQuery = streamQuery; + this.intermediateQueries = intermediateQueries; + this.finalQuery = finalQuery; + this.solutionModifier = solutionModifier; + this.streamToFinalQueryVariableMapping = streamToFinalQueryVariableMapping; + } + + public void validate() throws InvalidDivideQueryParserInputException { + // input query language should be defined, otherwise correctly parsing is impossible + if (inputQueryLanguage == null) { + throw new InvalidDivideQueryParserInputException( + "Input query language is not specified"); + } + + if (inputQueryLanguage == InputQueryLanguage.RSP_QL) { + // for RSP-QL queries, stream windows are already present in the query itself + // => they should only be defined as a separate entry in the input if the input + // defines any default values for the window parameter variables + if (streamWindows != null && !streamWindows.isEmpty()) { + if (!streamWindows.stream().allMatch(streamWindow -> + streamWindow.getStreamIri() != null && + streamWindow.getDefaultWindowParameterValues() != null && + !streamWindow.getDefaultWindowParameterValues().isEmpty())) { + throw new InvalidDivideQueryParserInputException( + "Stream windows should only be specified for an RSP-QL query if they " + + 
"contain the stream IRI and a non-empty list of default window " + + "parameter values - otherwise you should only define them in " + + "the RSP-QL stream query"); + } + } + } else if (inputQueryLanguage == InputQueryLanguage.SPARQL) { + // for SPARQL queries, no window parameters are present yet in the main stream + // SPARQL query (only an IRI) + // => stream windows should be specified explicitly as a separate entry in the + // input, to ensure that stream IRIs can be mapped on the correct window parameters + if (streamWindows == null || streamWindows.isEmpty()) { + throw new InvalidDivideQueryParserInputException( + "No names & window parameters specified of the stream graph IRI(s)"); + } + + // there may not be any stream window which is not fully specified + if (!streamWindows.stream().allMatch(StreamWindow::isValid)) { + throw new InvalidDivideQueryParserInputException( + "Some of the defined stream windows are incomplete or invalid"); + } + } + + // stream query should always be present (both for SPARQL & RSP-QL case) + if (streamQuery == null || streamQuery.trim().isEmpty()) { + throw new InvalidDivideQueryParserInputException( + "No stream query specified"); + } + + // for an RSP-QL query, no additional queries can be specified anymore + // except for the stream query => these inputs should be empty + if (inputQueryLanguage == InputQueryLanguage.RSP_QL && ( + (intermediateQueries != null && !intermediateQueries.isEmpty()) || + (finalQuery != null && !finalQuery.trim().isEmpty()))) { + throw new InvalidDivideQueryParserInputException( + "Final and/or intermediate queries are specified, which is not " + + "possible if the input query language is RSP-QL"); + } + + // not any of the intermediate queries can be null + if (intermediateQueries != null && + intermediateQueries.stream().anyMatch(s -> s == null || s.isEmpty())) { + throw new InvalidDivideQueryParserInputException( + "Some of the intermediate queries are invalid or empty"); + } + + // a variable 
mapping between a stream and final query can only be provided + // if a final query is present (= only possible if input language is SPARQL) + if ((streamToFinalQueryVariableMapping != null && + !streamToFinalQueryVariableMapping.isEmpty()) && + (inputQueryLanguage != InputQueryLanguage.SPARQL || + finalQuery == null || finalQuery.trim().isEmpty())) { + throw new InvalidDivideQueryParserInputException( + "A variable mapping from stream to final query can only be provided if the " + + "input query language is SPARQL and if a final query is specified"); + } + } + + public void preprocess() { + // all queries are preprocessed to ensure correct parsing + this.streamQuery = preprocessQuery(this.streamQuery); + if (this.intermediateQueries != null) { + this.intermediateQueries = this.intermediateQueries + .stream() + .filter(Objects::nonNull) + .map(this::preprocessQuery) + .collect(Collectors.toList()); + } + if (this.finalQuery != null) { + this.finalQuery = preprocessQuery(this.finalQuery); + } + if (this.solutionModifier != null && !this.solutionModifier.trim().isEmpty()) { + this.solutionModifier = preprocessQuery(solutionModifier) + " "; + } else { + this.solutionModifier = ""; + } + + // it is ensured a mapping is always available, possibly empty + if (this.streamToFinalQueryVariableMapping == null) { + this.streamToFinalQueryVariableMapping = new HashMap<>(); + } + } + + private String preprocessQuery(String query) { + return IOUtilities.removeWhiteSpace(query).replace("\r", "").trim(); + } + + public InputQueryLanguage getInputQueryLanguage() { + return inputQueryLanguage; + } + + public void setInputQueryLanguage(InputQueryLanguage inputQueryLanguage) { + this.inputQueryLanguage = inputQueryLanguage; + } + + public List getStreamWindows() { + return streamWindows; + } + + public String getStreamQuery() { + return streamQuery; + } + + public List getIntermediateQueries() { + return intermediateQueries; + } + + public String getFinalQuery() { + return finalQuery; 
+ } + + public String getSolutionModifier() { + return solutionModifier; + } + + public Map getStreamToFinalQueryVariableMapping() { + return streamToFinalQueryVariableMapping; + } + + @Override + public String toString() { + return "DivideQueryParserInput{" + + "inputQueryLanguage=" + inputQueryLanguage + + ", streamWindows=" + streamWindows + + ", streamQuery='" + streamQuery + '\'' + + ", intermediateQueries=" + intermediateQueries + + ", finalQuery='" + finalQuery + '\'' + + ", solutionModifier='" + solutionModifier + '\'' + + ", streamToFinalQueryVariableMapping=" + streamToFinalQueryVariableMapping + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserOutput.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserOutput.java new file mode 100644 index 0000000..6ed215e --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/DivideQueryParserOutput.java @@ -0,0 +1,43 @@ +package be.ugent.idlab.divide.core.query.parser; + +public class DivideQueryParserOutput { + + private final String queryPattern; + private final String sensorQueryRule; + private final String goal; + + private final QueryForm queryForm; + + public DivideQueryParserOutput(String queryPattern, + String sensorQueryRule, + String goal, + QueryForm queryForm) { + this.queryPattern = queryPattern; + this.sensorQueryRule = sensorQueryRule; + this.goal = goal; + this.queryForm = queryForm; + } + + public String getQueryPattern() { + return queryPattern; + } + + public String getSensorQueryRule() { + return sensorQueryRule; + } + + public String getGoal() { + return goal; + } + + public QueryForm getQueryForm() { + return queryForm; + } + + public boolean isNonEmpty() { + return queryPattern != null && !queryPattern.isEmpty() && + sensorQueryRule != null && !sensorQueryRule.isEmpty() && + goal != null && 
/**
 * Named graph pattern occurring in the WHERE clause of a parsed query:
 * couples a graph name to the textual clause inside the GRAPH block.
 */
class Graph {

    // graph name as written in the query (IRI or variable)
    private final String name;
    // textual content of the graph pattern
    private final String clause;

    Graph(String name, String clause) {
        this.name = name;
        this.clause = clause;
    }

    String getName() {
        return name;
    }

    String getClause() {
        return clause;
    }

    @Override
    public String toString() {
        return "Graph{" +
                "name='" + name + '\'' +
                ", clause='" + clause + '\'' +
                '}';
    }

}

/**
 * Parser converting DIVIDE query parser input into the query pattern,
 * sensor query rule and goal needed for the DIVIDE query derivation.
 */
public interface IDivideQueryParser {

    /**
     * Parses the given input into a {@link DivideQueryParserOutput}.
     *
     * @throws InvalidDivideQueryParserInputException if the input is invalid
     */
    DivideQueryParserOutput parseDivideQuery(DivideQueryParserInput input)
            throws InvalidDivideQueryParserInputException;

    /**
     * Validates the context enrichment attached to a DIVIDE query.
     *
     * @throws InvalidDivideQueryParserInputException if the enrichment is invalid
     */
    void validateDivideQueryContextEnrichment(ContextEnrichment contextEnrichment)
            throws InvalidDivideQueryParserInputException;

    /**
     * Parses a single SPARQL query into its split parts and prefixes.
     *
     * @throws InvalidDivideQueryParserInputException if the query is invalid
     */
    ParsedSparqlQuery parseSparqlQuery(String query)
            throws InvalidDivideQueryParserInputException;

    // returns a Turtle-syntax prefix list for the given prefixes
    // NOTE(review): presumably Set<Prefix> — the generic parameter was lost
    // in extraction; TODO confirm against the implementation
    String getTurtlePrefixList(Set prefixes);

}
a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/InputQueryLanguage.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/InputQueryLanguage.java new file mode 100644 index 0000000..5f87ffc --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/InputQueryLanguage.java @@ -0,0 +1,7 @@ +package be.ugent.idlab.divide.core.query.parser; + +public enum InputQueryLanguage { + + SPARQL, RSP_QL + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/InvalidDivideQueryParserInputException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/InvalidDivideQueryParserInputException.java new file mode 100644 index 0000000..f28bef1 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/InvalidDivideQueryParserInputException.java @@ -0,0 +1,13 @@ +package be.ugent.idlab.divide.core.query.parser; + +public class InvalidDivideQueryParserInputException extends Exception { + + public InvalidDivideQueryParserInputException(String description, Exception base) { + super(description, base); + } + + public InvalidDivideQueryParserInputException(String description) { + super(description); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/MappedDivideQueryParserInput.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/MappedDivideQueryParserInput.java new file mode 100644 index 0000000..566fa1b --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/MappedDivideQueryParserInput.java @@ -0,0 +1,43 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class MappedDivideQueryParserInput 
extends DivideQueryParserInput { + + private final Map finalQueryVariableMapping; + + MappedDivideQueryParserInput(InputQueryLanguage inputQueryLanguage, + List streamWindows, + String streamQuery, + List intermediateQueries, + String finalQuery, + String solutionModifier, + Map finalQueryVariableMapping) { + super(inputQueryLanguage, streamWindows, streamQuery, intermediateQueries, + finalQuery, solutionModifier, new HashMap<>()); + + this.finalQueryVariableMapping = new HashMap<>(); + for (Map.Entry entry : finalQueryVariableMapping.entrySet()) { + this.finalQueryVariableMapping.put(entry.getValue(), entry.getKey()); + } + } + + MappedDivideQueryParserInput(DivideQueryParserInput input) { + super(input.getInputQueryLanguage(), + input.getStreamWindows(), + input.getStreamQuery(), + input.getIntermediateQueries(), + input.getFinalQuery(), + input.getSolutionModifier(), + input.getStreamToFinalQueryVariableMapping()); + + this.finalQueryVariableMapping = new HashMap<>(); + } + + public Map getFinalQueryVariableMapping() { + return finalQueryVariableMapping; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedSparqlQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedSparqlQuery.java new file mode 100644 index 0000000..969a4ae --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedSparqlQuery.java @@ -0,0 +1,32 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.Set; + +public class ParsedSparqlQuery { + + private final SplitSparqlQuery splitSparqlQuery; + private final Set prefixes; + + public ParsedSparqlQuery(SplitSparqlQuery splitSparqlQuery, + Set prefixes) { + this.splitSparqlQuery = splitSparqlQuery; + this.prefixes = prefixes; + } + + public SplitSparqlQuery getSplitSparqlQuery() { + return splitSparqlQuery; + } + + public Set getPrefixes() { + return prefixes; + } + 
+ @Override + public String toString() { + return "ParsedSparqlQuery{" + + "splitSparqlQuery=" + splitSparqlQuery + + ", prefixes=" + prefixes + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedStreamQueryWhereClause.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedStreamQueryWhereClause.java new file mode 100644 index 0000000..a3f787a --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedStreamQueryWhereClause.java @@ -0,0 +1,32 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.List; + +class ParsedStreamQueryWhereClause { + + private final String contextPart; + private final List streamItems; + + ParsedStreamQueryWhereClause(String contextPart, + List streamItems) { + this.contextPart = contextPart; + this.streamItems = streamItems; + } + + String getContextPart() { + return contextPart; + } + + List getStreamItems() { + return streamItems; + } + + @Override + public String toString() { + return "ParsedStreamQueryWhereClause{\n" + + "contextPart='" + contextPart + '\'' + + ",\nstreamItems=" + streamItems + + "\n}"; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedStreamWindow.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedStreamWindow.java new file mode 100644 index 0000000..ba05ae2 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/ParsedStreamWindow.java @@ -0,0 +1,33 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.Map; +import java.util.Set; + +public class ParsedStreamWindow extends StreamWindow { + + private final Set unboundVariables; + + public ParsedStreamWindow(String streamIri, + String windowDefinition, + Map defaultWindowParameterValues, + Set 
unboundVariables) { + super(streamIri, windowDefinition, defaultWindowParameterValues); + + this.unboundVariables = unboundVariables; + } + + public Set getUnboundVariables() { + return unboundVariables; + } + + @Override + public String toString() { + return "ParsedStreamWindow{" + + "streamIri='" + streamIri + '\'' + + ", windowDefinition='" + windowDefinition + '\'' + + ", unboundVariables=" + unboundVariables + + ", defaultWindowParameterValues=" + defaultWindowParameterValues + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/Prefix.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/Prefix.java new file mode 100644 index 0000000..ca76719 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/Prefix.java @@ -0,0 +1,48 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.Objects; + +public class Prefix { + + private final String name; + private final String uri; + + Prefix(String name, String uri) { + this.name = name; + this.uri = uri; + } + + public String getName() { + return name; + } + + public String getUri() { + return uri; + } + + @Override + public String toString() { + return "Prefix{" + + "name='" + name + '\'' + + ", uri='" + uri + '\'' + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Prefix prefix = (Prefix) o; + return name.equals(prefix.name) && uri.equals(prefix.uri); + } + + @Override + public int hashCode() { + return Objects.hash(name, uri); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/QueryForm.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/QueryForm.java new file mode 100644 index 0000000..f359563 --- /dev/null +++ 
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/QueryForm.java @@ -0,0 +1,16 @@ +package be.ugent.idlab.divide.core.query.parser; + +public enum QueryForm { + + CONSTRUCT, SELECT, ASK, DESCRIBE; + + static QueryForm fromString(String name) { + for (QueryForm queryForm : QueryForm.values()) { + if (queryForm.name().equalsIgnoreCase(name)) { + return queryForm; + } + } + return null; + } + +} \ No newline at end of file diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/RspQlQueryBody.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/RspQlQueryBody.java new file mode 100644 index 0000000..02b6533 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/RspQlQueryBody.java @@ -0,0 +1,46 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.Set; + +public class RspQlQueryBody { + + private final String queryBody; + private final Set unboundVariables; + + private final QueryForm queryForm; + private final String resultPart; + private final String wherePart; + + public RspQlQueryBody(String queryBody, + Set unboundVariables, + QueryForm queryForm, + String resultPart, + String wherePart) { + this.queryBody = queryBody; + this.unboundVariables = unboundVariables; + this.queryForm = queryForm; + this.resultPart = resultPart; + this.wherePart = wherePart; + } + + public String getQueryBody() { + return queryBody; + } + + public Set getUnboundVariables() { + return unboundVariables; + } + + public QueryForm getQueryForm() { + return queryForm; + } + + public String getResultPart() { + return resultPart; + } + + public String getWherePart() { + return wherePart; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/SplitSparqlQuery.java 
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/SplitSparqlQuery.java new file mode 100644 index 0000000..7249d39 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/SplitSparqlQuery.java @@ -0,0 +1,62 @@ +package be.ugent.idlab.divide.core.query.parser; + +public class SplitSparqlQuery { + + private final String prefixPart; + private final QueryForm queryForm; + private final String resultPart; + private final String fromPart; + private final String wherePart; + private final String finalPart; + + SplitSparqlQuery(String prefixPart, + QueryForm queryForm, + String resultPart, + String fromPart, + String wherePart, + String finalPart) { + this.prefixPart = prefixPart; + this.queryForm = queryForm; + this.resultPart = resultPart; + this.fromPart = fromPart; + this.wherePart = wherePart; + this.finalPart = finalPart; + } + + public String getPrefixPart() { + return prefixPart; + } + + public QueryForm getQueryForm() { + return queryForm; + } + + public String getResultPart() { + return resultPart; + } + + public String getFromPart() { + return fromPart; + } + + public String getWherePart() { + return wherePart; + } + + public String getFinalPart() { + return finalPart; + } + + @Override + public String toString() { + return "SplitSparqlQuery{\n" + + "prefixPart='" + prefixPart + '\'' + + ",\nqueryForm=" + queryForm + + ",\nresultPart='" + resultPart + '\'' + + ",\nfromPart='" + fromPart + '\'' + + ",\nwherePart='" + wherePart + '\'' + + ",\nfinalPart='" + finalPart + '\'' + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/StreamWindow.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/StreamWindow.java new file mode 100644 index 0000000..0d66d08 --- /dev/null +++ 
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/StreamWindow.java @@ -0,0 +1,59 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.HashMap; +import java.util.Map; + +public class StreamWindow { + + protected String streamIri; + protected String windowDefinition; + protected Map defaultWindowParameterValues; + + public StreamWindow(String streamIri, + String windowDefinition, + Map defaultWindowParameterValues) { + if (streamIri != null) { + streamIri = streamIri.trim(); + } + this.streamIri = streamIri; + this.windowDefinition = windowDefinition; + this.defaultWindowParameterValues = defaultWindowParameterValues; + } + + public StreamWindow(String streamIri, + String windowDefinition) { + this.streamIri = streamIri; + this.windowDefinition = windowDefinition; + this.defaultWindowParameterValues = new HashMap<>(); + } + + public String getStreamIri() { + return streamIri; + } + + public String getWindowDefinition() { + return windowDefinition; + } + + public Map getDefaultWindowParameterValues() { + return defaultWindowParameterValues; + } + + public void setDefaultWindowParameterValues(Map defaultWindowParameterValues) { + this.defaultWindowParameterValues = defaultWindowParameterValues; + } + + boolean isValid() { + return streamIri != null && windowDefinition != null; + } + + @Override + public String toString() { + return "StreamWindow{" + + "streamIri='" + streamIri + '\'' + + ", windowDefinition='" + windowDefinition + '\'' + + ", defaultWindowParameterValues=" + defaultWindowParameterValues + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClause.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClause.java new file mode 100644 index 0000000..730e5bf --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClause.java @@ 
-0,0 +1,24 @@ +package be.ugent.idlab.divide.core.query.parser; + +import java.util.List; + +class WhereClause { + + private final List items; + + WhereClause(List items) { + this.items = items; + } + + List getItems() { + return items; + } + + @Override + public String toString() { + return "WhereClause{" + + "items=" + items + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClauseExpressionItem.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClauseExpressionItem.java new file mode 100644 index 0000000..e88d2a7 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClauseExpressionItem.java @@ -0,0 +1,28 @@ +package be.ugent.idlab.divide.core.query.parser; + +class WhereClauseExpressionItem extends WhereClauseItem { + + private final String expression; + + WhereClauseExpressionItem(String expression) { + super(WhereClauseItemType.EXPRESSION); + this.expression = expression; + } + + String getExpression() { + return expression; + } + + @Override + String getClause() { + return expression; + } + + @Override + public String toString() { + return "WhereClauseExpressionItem{" + + "expression='" + expression + '\'' + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClauseGraphItem.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClauseGraphItem.java new file mode 100644 index 0000000..26add55 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/core/query/parser/WhereClauseGraphItem.java @@ -0,0 +1,28 @@ +package be.ugent.idlab.divide.core.query.parser; + +class WhereClauseGraphItem extends WhereClauseItem { + + private final Graph graph; + + WhereClauseGraphItem(Graph graph) { + super(WhereClauseItemType.GRAPH); + this.graph = graph; + 
/**
 * Base class for the items of a parsed WHERE clause; each item knows its
 * type and can render its textual clause.
 */
abstract class WhereClauseItem {

    // discriminator used instead of instanceof checks by consumers
    protected final WhereClauseItemType itemType;

    WhereClauseItem(WhereClauseItemType itemType) {
        this.itemType = itemType;
    }

    WhereClauseItemType getItemType() {
        return itemType;
    }

    // textual clause this item contributes to the WHERE clause
    abstract String getClause();

}

/**
 * Kinds of items that can occur in a parsed WHERE clause.
 */
enum WhereClauseItemType {

    GRAPH, EXPRESSION

}
javax.annotation.Nonnull; + +public class WindowParameter { + + enum WindowParameterType { + XSD_DURATION, + TIME_SECONDS, + TIME_MINUTES, + TIME_HOURS + } + + private final String variable; + private final String value; + private final WindowParameterType type; + private final boolean isValueSubstitutionVariable; + + public WindowParameter(String variable, + String value, + WindowParameterType type, + boolean isValueSubstitutionVariable) { + this.variable = variable; + this.value = value; + this.type = type; + this.isValueSubstitutionVariable = isValueSubstitutionVariable; + } + + @Nonnull + public String getVariable() { + return variable; + } + + public String getValue() { + return value; + } + + public WindowParameterType getType() { + return type; + } + + public boolean isValueSubstitutionVariable() { + return isValueSubstitutionVariable; + } + + @Override + public String toString() { + return "WindowParameter{" + + "variable='" + variable + '\'' + + ", value='" + value + '\'' + + ", type=" + type + + ", isValueSubstitutionVariable=" + isValueSubstitutionVariable + + '}'; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/IRspEngineHandler.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/IRspEngineHandler.java new file mode 100644 index 0000000..c523040 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/IRspEngineHandler.java @@ -0,0 +1,117 @@ +package be.ugent.idlab.divide.rsp; + +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.rsp.engine.IRspEngine; + +/** + * Handler of an RSP engine within to DIVIDE. + * Wraps the RSP engine content and allows to schedule new queries for registration, + * as well as updating the registration based on these scheduled queries. + */ +public interface IRspEngineHandler { + + /** + * Gets the wrapped RSP engine instance. 
    /**
     * Gets the wrapped RSP engine instance.
     *
     * @return the wrapped RSP engine this object is the handler of
     */
    IRspEngine getRspEngine();

    /**
     * Clears the list of queries that are scheduled for registration on the
     * wrapped RSP engine.
     */
    void clearRegistrationSchedule();

    /**
     * Clears the list of queries that are scheduled for registration on the
     * wrapped RSP engine, but only those that originate from the given
     * DIVIDE query.
     *
     * @param divideQuery DIVIDE query for which the associated scheduled
     *                    RSP queries should be removed from the scheduled list
     */
    void clearRegistrationSchedule(IDivideQuery divideQuery);

    /**
     * Schedules a specific query for registration on the wrapped RSP engine.
     * The query will not be registered yet, but kept track of so it can be
     * registered during the next call of {@link #updateRegistration()}.
     *
     * @param rspQLQueryBody query body to be scheduled for registration, in RSP-QL format
     * @param divideQuery    DIVIDE query that was instantiated into the new RSP-QL query body
     */
    void scheduleForRegistration(String rspQLQueryBody, IDivideQuery divideQuery);

    /**
     * Query updating routine which updates the queries registered on the wrapped RSP engine.
     * It compares the currently registered queries with the queries that were scheduled for
     * registration (with the {@link #scheduleForRegistration(String, IDivideQuery)} method)
     * since the last call of this method. Registered queries that are not again scheduled
     * for registration are unregistered from the RSP engine; scheduled queries that are not
     * yet registered are registered on the RSP engine.
     */
    void updateRegistration();

    /**
     * Same updating routine as {@link #updateRegistration()}, restricted to the
     * queries originating from the given DIVIDE query: registered queries of that
     * DIVIDE query that are not again scheduled are unregistered, and scheduled
     * ones that are not yet registered are registered.
     *
     * @param divideQuery DIVIDE query for which the registration should be specifically updated
     */
    void updateRegistration(IDivideQuery divideQuery);

    /**
     * Unregisters all queries from the wrapped RSP engine that are currently registered
     * via the DIVIDE query derivation.
     */
    void unregisterAllQueries();

    /**
     * Unregisters the queries from the wrapped RSP engine that are currently registered
     * via the DIVIDE query derivation of the specified DIVIDE query.
     *
     * @param query DIVIDE query of which the associated RSP engine queries need to be
     *              unregistered from the wrapped RSP engine
     */
    void unregisterAllQueriesOriginatingFromDivideQuery(IDivideQuery query);

    /**
     * Enqueues a pause request for the streams of the wrapped RSP engine.
     * This pause HTTP request asks the RSP engine to temporarily stop sending
     * incoming stream events onto its internal RDF streams used by the continuous
     * queries; events are buffered until the streams are restarted via
     * {@link #restartRspEngineStreams()}. The pause request is only actually sent
     * if the streams are not yet paused.
     */
    void pauseRspEngineStreams();

    /**
     * Enqueues a restart request for the streams of the wrapped RSP engine.
     * This restart HTTP request asks the RSP engine to resume sending incoming
     * stream events onto its internal RDF streams; all data buffered during the
     * pause period is put on the internal RDF stream immediately after the restart.
     * The restart request is only actually sent if the streams are paused at the
     * start time of executing the queued request.
     */
    void restartRspEngineStreams();

    /**
     * Stops sending any updates (pause or restart requests) for the streams of the
     * wrapped RSP engine: all enqueued requests are cancelled and no new requests
     * can be enqueued anymore; only the currently executing request will still
     * finish its task.
     */
    void stopRspEngineStreamsUpdates();

}
be.ugent.idlab.divide.rsp.translate.IQueryTranslator; +import be.ugent.idlab.divide.rsp.translate.QueryTranslatorFactory; +import be.ugent.idlab.divide.util.LogConstants; +import be.ugent.idlab.util.io.IOUtilities; +import org.apache.jena.atlas.lib.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Random; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +class RspEngineHandler implements IRspEngineHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(RspEngineHandler.class.getName()); + + private final IRspEngine rspEngine; + private final List scheduledQueries; + + private final IRspEngineApiManager rspEngineApiManager; + private final IQueryTranslator queryTranslator; + + private long queryCounter; + + private final String id; + + private final RspEngineStatusHandler rspEngineStatusHandler; + + /** + * Scheduled executor used to retry query registrations that failed because + * of a network error + */ + private ScheduledThreadPoolExecutor retrialScheduledExecutor; + + private Future retrialFuture; + private final Boolean retrialFutureGuard = false; + + RspEngineHandler(RspQueryLanguage rspQueryLanguage, + String url) throws DivideInvalidInputException { + // make sure trailing '/' is removed from registration url + if (url.endsWith("/")) { + url = url.substring(0, url.length() - 1); + } + + this.rspEngine = RspEngineFactory.createInstance(rspQueryLanguage, url); + this.scheduledQueries = new ArrayList<>(); + + this.rspEngineApiManager = RspEngineApiManagerFactory.createInstance(this.rspEngine); + this.queryTranslator = QueryTranslatorFactory.createInstance(rspQueryLanguage); + + this.queryCounter = 0; + + this.id = generateAlphabeticId(); + + // create retrial executor 
+ this.retrialScheduledExecutor = (ScheduledThreadPoolExecutor) + Executors.newScheduledThreadPool(1); + this.retrialScheduledExecutor.setRemoveOnCancelPolicy(true); + + this.rspEngineStatusHandler = new RspEngineStatusHandler(rspEngine, rspEngineApiManager); + } + + @Override + public IRspEngine getRspEngine() { + return rspEngine; + } + + @Override + public synchronized void clearRegistrationSchedule() { + LOGGER.info("Clearing all queries scheduled for registration"); + + this.scheduledQueries.clear(); + } + + @Override + public synchronized void clearRegistrationSchedule(IDivideQuery divideQuery) { + LOGGER.info("Clearing all queries scheduled for registration that " + + "originate from the DIVIDE query '{}'", divideQuery.getName()); + + this.scheduledQueries.removeIf( + rspQuery -> rspQuery.getOriginalDivideQuery().equals(divideQuery)); + } + + @Override + public synchronized void scheduleForRegistration(String rspQLQueryBody, + IDivideQuery divideQuery) { + // create unique query name + String queryName = String.format("Q%d%s", queryCounter++, id); + + LOGGER.info("Scheduling RSP-QL query with name '{}' for registration at {}: {}", + queryName, + rspEngine.getRegistrationUrl(), + IOUtilities.removeWhiteSpace(rspQLQueryBody)); + + // translate query according to RSP query language + String translatedQueryBody = queryTranslator.translateQuery( + rspQLQueryBody, queryName); + + // create an RSP query instance + IRspQuery query = RspQueryFactory.createInstance( + queryName, + // preprocess query before registration, to make sure that they appear + // in a uniform format (since the body is used for comparison between + // already scheduled and new queries). 
+ // NOTE: it is no issue if semantically equivalent queries do not match + // string-wise - the only consequence then is that this query is + // first unregistered and then immediately re-registered by the + // updateRegistration method + preprocessQueryBeforeRegistration(translatedQueryBody), + preprocessQueryBeforeRegistration(rspQLQueryBody), + divideQuery); + + // schedule the RSP query for the next registration update + scheduledQueries.add(query); + + LOGGER.info("Query '{}' translated and scheduled for registration at {} with body: {}", + queryName, + rspEngine.getRegistrationUrl(), + query.getQueryBody()); + } + + @Override + public synchronized void updateRegistration(IDivideQuery divideQuery) { + LOGGER.info("Updating RSP engine queries associated to DIVIDE query '{}' at {}", + divideQuery.getName(), rspEngine.getRegistrationUrl()); + + // obtain currently registered queries in RSP engine + // and create copy of scheduled queries + // -> filter both on originating DIVIDE query + List previousQueries = rspEngine.getRegisteredQueries() + .stream() + .filter(rspQuery -> rspQuery.getOriginalDivideQuery().equals(divideQuery)) + .collect(Collectors.toList()); + List scheduledQueries = this.scheduledQueries + .stream() + .filter(rspQuery -> rspQuery.getOriginalDivideQuery().equals(divideQuery)) + .collect(Collectors.toList()); + + // perform registration update + List> queriesToRetry = + updateRegistration(previousQueries, scheduledQueries); + + // remove all processed schedules queries from list + this.scheduledQueries.removeAll(scheduledQueries); + + // retry queries for which the (un)registering failed due to network issue + // (but of course only if there are queries to be retried) + if (!queriesToRetry.isEmpty()) { + synchronized (this.retrialFutureGuard) { + LOGGER.info("Query update at {} for DIVIDE query '{}': " + + "rescheduling retrial of {} failed queries", + rspEngine.getRegistrationUrl(), + divideQuery.getName(), + queriesToRetry.size()); + 
this.retrialFuture = retrialScheduledExecutor.schedule( + new QueryRegistrationUpdateRetrialTask(queriesToRetry, 10), + 10, TimeUnit.SECONDS); + } + } else { + LOGGER.info("Finished query update for DIVIDE query '{}' at {} - no queries to retry", + divideQuery.getName(), rspEngine.getRegistrationUrl()); + } + } + + @Override + public synchronized void updateRegistration() { + LOGGER.info("Updating RSP engine queries at {}", + rspEngine.getRegistrationUrl()); + + // obtain currently registered queries in RSP engine + // and create copy of scheduled queries + List previousQueries = new ArrayList<>(rspEngine.getRegisteredQueries()); + List scheduledQueries = new ArrayList<>(this.scheduledQueries); + + // perform registration update + List> queriesToRetry = + updateRegistration(previousQueries, scheduledQueries); + + // whatever happens, all queries should be removed from the list + // of scheduled queries + // -> if registration succeeded, the reasons are obvious + // -> if registration failed, it will only be retried the next time this + // method is called, i.e., at the next context update; if the query still + // needs to be registered at that point, it will again be the output of + // the query derivation and will therefore have been added again to the + // list of scheduled queries + this.scheduledQueries.clear(); + + // retry queries for which the (un)registering failed due to network issue + // (but of course only if there are queries to be retried) + if (!queriesToRetry.isEmpty()) { + synchronized (this.retrialFutureGuard) { + LOGGER.info("Query update at {}: rescheduling retrial of {} failed queries", + rspEngine.getRegistrationUrl(), queriesToRetry.size()); + this.retrialFuture = retrialScheduledExecutor.schedule( + new QueryRegistrationUpdateRetrialTask(queriesToRetry, 10), + 10, TimeUnit.SECONDS); + } + } else { + LOGGER.info("Finished query update at {} - no queries to retry", + rspEngine.getRegistrationUrl()); + } + } + + private synchronized List> 
updateRegistration( + List previousQueries, List scheduledQueries) { + // stop all query retrials + stopQueryUpdateRetrials(); + + // create empty list of queries that should be retried + List> queriesToRetry = new ArrayList<>(); + + LOGGER.info("Query update at {}: scheduled query names: {} - " + + "currently registered query names: {}", + rspEngine.getRegistrationUrl(), + Arrays.toString(scheduledQueries.stream().map( + IRspQuery::getQueryName).toArray()), + Arrays.toString(previousQueries.stream().map( + IRspQuery::getQueryName).toArray())); + + // unregister previously valid queries that are no longer valid + for (IRspQuery previousQuery : previousQueries) { + // check if the previous query is scheduled again by checking if it is + // present in the list of scheduled queries + // -> if it is scheduled again, the returned boolean is true + boolean scheduledAgain = scheduledQueries.contains(previousQuery); + + // if the query is not scheduled again, it should be unregistered + // (otherwise, it can be kept registered, and nothing should be done for this query; + // except removing it from the scheduled list, which has been done in the call above) + if (!scheduledAgain) { + try { + // unregister query from RSP engine + rspEngineApiManager.unregisterQuery(previousQuery); + + // only if successful (i.e., if no exception is thrown), + // the blueprint of this RSP engine's queries is also updated + rspEngine.removeRegisteredQuery(previousQuery); + + // if a failure occurs when unregistering this query, the RSP engine's + // blueprint of queries is not updated (i.e., this query is not removed + // from the list) + // => at the following call of this method, this blueprint tells DIVIDE + // that this query is still registered on the engine, and that it + // should again be tried to unregister this query (unless by then it + // is again part of the scheduled queries) + + } catch (RspEngineApiNetworkException e) { + LOGGER.error("External network error when unregistering 
query '{}' at {}", + previousQuery.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request could potentially solve the issue since this is + // a network error (i.e., the destination could not be reached) + // -> most likely there are network connection issues + // OR the RSP engine server is down + queriesToRetry.add(Pair.create(previousQuery, false)); + + // TODO MONITOR: 28/01/2021 do something with fact that RSP engine server might be down? + + } catch (RspEngineApiResponseException e) { + LOGGER.error("External server error when unregistering query '{}' at {}", + previousQuery.getQueryName(), rspEngine.getRegistrationUrl(), e); + + // retrying the request is NOT useful, since this is an RSP engine server error + // (and the RSP engine server should ensure it can handle the registration + // requests sent by DIVIDE) + + // TODO MONITOR: 28/01/2021 do something with fact that RSP engine server cannot + // properly handle registration request? + + } catch (DivideInvalidInputException e) { + // note: this will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal URL error within DIVIDE when trying to unregister " + + "query '{}' at {}", + previousQuery.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request is NOT useful, since this error represents an + // internal condition that will not change + } + + } else { + LOGGER.info("Query with name '{}' is still registered as query with name '{}'", + scheduledQueries.get(scheduledQueries.indexOf(previousQuery)).getQueryName(), + previousQuery.getQueryName()); + + // remove query from scheduled queries + scheduledQueries.remove(previousQuery); + } + } + + // register newly valid queries by looping over the list of scheduled queries + // -> if a scheduled query was already registered before on the RSP engine, + // it has already been removed from the list of scheduled queries + // -> no need for any processing of the remaining items of the scheduled 
queries + // list, they can all simply be registered + for (IRspQuery query : scheduledQueries) { + try { + // register query to RSP engine + rspEngineApiManager.registerQuery(query); + + // only if successful (i.e., if no exception is thrown), + // the blueprint of this RSP engine's queries is also updated + rspEngine.addRegisteredQuery(query); + + // if a failure occurs when registering this query, the RSP engine's + // blueprint of queries is not updated (i.e., this query is not removed + // from the list) + // => at the following call of this method, this blueprint tells DIVIDE + // that this query is not registered yet on the engine; if it is again + // part of the scheduled queries, it should then still be registered + // (instead of ignoring it since it is already considered registered) + + } catch (RspEngineApiNetworkException e) { + LOGGER.error("External network error when registering query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request could potentially solve the issue since this is + // a network error (i.e., the destination could not be reached) + // -> most likely there are network connection issues + // OR the RSP engine server is down + queriesToRetry.add(Pair.create(query, true)); + + // TODO MONITOR: 28/01/2021 do something with fact that RSP engine server might be down? + + } catch (RspEngineApiResponseException e) { + LOGGER.error("External server error when registering query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl(), e); + + // retrying the request is NOT useful, since this is an RSP engine server error + // (and the RSP engine server should ensure it can handle the registration + // requests sent by DIVIDE) + + // TODO MONITOR: 28/01/2021 do something with fact that RSP engine server cannot + // properly handle registration request? 
+ + } catch (RspEngineApiInputException e) { + // note: DivideInvalidInputException will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal query error within DIVIDE when trying to register query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request is NOT useful, since this error represents an + // internal condition that will not change + + } catch (DivideInvalidInputException e) { + // note: this will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal URL error within DIVIDE when trying to register query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request is NOT useful, since this error represents an + // internal condition that will not change + } + } + + return queriesToRetry; + } + + private void stopQueryUpdateRetrials() { + // shutdown scheduled executor + // -> no new tasks can be submitted + LOGGER.info("Shutting down retrial scheduled executor"); + retrialScheduledExecutor.shutdown(); + + LOGGER.info("Trying to cancel the latest retrial task"); + synchronized (retrialFutureGuard) { + if (retrialFuture != null) { + // cancel the task, and allow for interruption while running + // -> if not started yet, it will never start + // (a shutdown does not prevent this, so this is required) + // -> if already started, the thread will be interrupted + // (throwing an InterruptedException if sleeping, and otherwise + // setting the interrupt flag so that the thread knows it can + // finish but should not reschedule a new retrial on failure) + // -> if already finished, this method will simply return + LOGGER.info("Canceling the latest retrial task"); + retrialFuture.cancel(true); + retrialFuture = null; + } + } + + // await for termination of tasks of scheduled executor + // -> the last scheduled future has been canceled, so will not + // start if it was not started yet, and will otherwise regularly + // check for 
its interruption and return immediately at anchor point + try { + LOGGER.info("Awaiting termination of retrial tasks"); + if (!retrialScheduledExecutor.awaitTermination(1, TimeUnit.MINUTES)) { + retrialScheduledExecutor.shutdownNow(); + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Awaiting termination not finished after 1 minute => hard shutdown"); + } + } catch (InterruptedException e) { + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Interruption while awaiting termination of retrial tasks => hard shutdown"); + retrialScheduledExecutor.shutdownNow(); + } + + // create a new retrial executor + this.retrialScheduledExecutor = (ScheduledThreadPoolExecutor) + Executors.newScheduledThreadPool(1); + this.retrialScheduledExecutor.setRemoveOnCancelPolicy(true); + } + + private String preprocessQueryBeforeRegistration(String query) { + // preprocess query before registration by removing all unnecessary whitespace + return IOUtilities.removeWhiteSpace(query).trim(); + } + + private String generateAlphabeticId() { + int leftLimit = 97; // letter 'a' + int rightLimit = 122; // letter 'z' + int targetStringLength = 5; + Random random = new Random(); + + return random.ints(leftLimit, rightLimit + 1) + .limit(targetStringLength) + .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) + .toString(); + } + + private class QueryRegistrationUpdateRetrialTask implements Runnable { + + private final Logger LOGGER = LoggerFactory.getLogger( + QueryRegistrationUpdateRetrialTask.class.getName()); + + /** + * List of pairs representing queries for which the registering or + * unregistering should be retried. 
Each pair consists of the query + * to be registered or unregistered, and a boolean representing + * whether the required operation is a registration or not + * (i.e., true means registering, false means unregistering) + */ + private final List> queryPairs; + + /** + * Time (in seconds) before this batch of queries was rescheduled + * for a new register/unregister attempt + */ + private final long delayBeforeRetrial; + + public QueryRegistrationUpdateRetrialTask(List> queryPairs, + long delayBeforeRetrial) { + this.queryPairs = queryPairs; + this.delayBeforeRetrial = delayBeforeRetrial; + } + + @Override + public void run() { + LOGGER.info("Starting retrial of updating registration at {} for {} queries: {}", + rspEngine.getRegistrationUrl(), + queryPairs.size(), + Arrays.toString(queryPairs.stream() + .map(Pair::getLeft) + .map(IRspQuery::getQueryName) + .toArray())); + + // create empty list of queries that should be retried + List> queriesToRetry = new ArrayList<>(); + + for (Pair queryBooleanPair : queryPairs) { + IRspQuery query = queryBooleanPair.getLeft(); + boolean register = queryBooleanPair.getRight(); + + LOGGER.info("Query update retrial at {}: retry {} {}", + rspEngine.getRegistrationUrl(), + register ? 
"registering" : "unregistering", + query.getQueryName()); + + try { + // register or unregister query at RSP engine + if (register) { + rspEngineApiManager.registerQuery(query); + } else { + rspEngineApiManager.unregisterQuery(query); + } + + // only if successful (i.e., if no exception is thrown), + // the blueprint of this RSP engine's queries is also updated + rspEngine.addRegisteredQuery(query); + + } catch (RspEngineApiNetworkException e) { + LOGGER.error("External network error when registering query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + // retry once again if registration failed again + queriesToRetry.add(Pair.create(query, register)); + + } catch (RspEngineApiResponseException e) { + LOGGER.error("External server error when registering query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl(), e); + + // retrying the request is NOT useful, since this is an RSP engine server error + // (and the RSP engine server should ensure it can handle the registration + // requests sent by DIVIDE) + + // TODO MONITOR: 28/01/2021 do something with fact that RSP engine server cannot + // properly handle registration request? 
+ + } catch (RspEngineApiInputException e) { + // note: DivideInvalidInputException will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal query error within DIVIDE when trying to register query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request is NOT useful, since this error represents an + // internal condition that will not change + + } catch (DivideInvalidInputException e) { + // note: this will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal URL error within DIVIDE when trying to register query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + // retrying the request is NOT useful, since this error represents an + // internal condition that will not change + } + + // if the thread has been interrupted, then stop going over all queries and + // immediately return (also do not schedule a new retrial task, obviously) + if (Thread.currentThread().isInterrupted()) { + LOGGER.info("Query update retrial at {}: task interrupted after updating {}", + rspEngine.getRegistrationUrl(), + query.getQueryName()); + return; + } + } + + if (!queriesToRetry.isEmpty()) { + synchronized (retrialFutureGuard) { + // only reschedule if thread has not been interrupted by now + if (!Thread.currentThread().isInterrupted()) { + LOGGER.info("Query update retrial at {}: rescheduling retrial " + + "of {} failed queries", + rspEngine.getRegistrationUrl(), queriesToRetry.size()); + + // schedule new retry, with a doubled delay + retrialFuture = retrialScheduledExecutor.schedule( + new QueryRegistrationUpdateRetrialTask( + queriesToRetry, delayBeforeRetrial * 2), + delayBeforeRetrial * 2, + TimeUnit.SECONDS); + + } else { + LOGGER.info("Query update retrial at {}: NOT rescheduling retrial " + + "of {} failed queries because of interruption", + rspEngine.getRegistrationUrl(), queriesToRetry.size()); + } + } + } else { + LOGGER.info("Finished query update 
retrial at {} - no queries to retry", + rspEngine.getRegistrationUrl()); + } + } + + } + + @Override + public void unregisterAllQueries() { + LOGGER.info("Unregistering all RSP engine queries at {}", + rspEngine.getRegistrationUrl()); + + // stop all query update retrials since the associated component will be unregistered + stopQueryUpdateRetrials(); + + // create new list of all registered queries + List queriesToUnregister = + new ArrayList<>(rspEngine.getRegisteredQueries()); + + if (!queriesToUnregister.isEmpty()) { + LOGGER.info("Unregistering the following queries at {}: {}", + rspEngine.getRegistrationUrl(), + Arrays.toString(queriesToUnregister.stream().map( + IRspQuery::getQueryName).toArray())); + + // unregister queries + for (IRspQuery query : queriesToUnregister) { + unregisterQuery(query); + } + + } else { + LOGGER.info("No RSP engine queries registered anymore at {}", + rspEngine.getRegistrationUrl()); + } + } + + @Override + public void unregisterAllQueriesOriginatingFromDivideQuery(IDivideQuery divideQuery) { + LOGGER.info("Unregistering RSP engine queries of DIVIDE query '{}' at {}", + divideQuery.getName(), rspEngine.getRegistrationUrl()); + + // stop all query update retrials since otherwise some new queries associated + // to this removed DIVIDE query might be re-registered + stopQueryUpdateRetrials(); + + // retrieve list of all queries associated to the given DIVIDE query + List queriesToUnregister = rspEngine.getRegisteredQueries() + .stream() + .filter(rspQuery -> divideQuery.equals(rspQuery.getOriginalDivideQuery())) + .collect(Collectors.toList()); + + if (!queriesToUnregister.isEmpty()) { + LOGGER.info("Unregistering the following queries at {}: {}", + rspEngine.getRegistrationUrl(), + Arrays.toString(queriesToUnregister.stream().map( + IRspQuery::getQueryName).toArray())); + + // unregister queries + for (IRspQuery query : queriesToUnregister) { + unregisterQuery(query); + } + + } else { + LOGGER.info("No RSP engine queries registered 
at {} that are associated to" + + " DIVIDE query '{}'", + rspEngine.getRegistrationUrl(), divideQuery.getName()); + } + } + + private void unregisterQuery(IRspQuery query) { + try { + // unregister query from RSP engine + rspEngineApiManager.unregisterQuery(query); + + // only if successful (i.e., if no exception is thrown), + // the blueprint of this RSP engine's queries is also updated + rspEngine.removeRegisteredQuery(query); + + // if unregistering fails, it is what it is and it should not be retried + + } catch (RspEngineApiNetworkException e) { + LOGGER.error("External network error when unregistering query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + + } catch (RspEngineApiResponseException e) { + LOGGER.error("External server error when unregistering query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl(), e); + + } catch (DivideInvalidInputException e) { + // note: this will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal URL error within DIVIDE when trying to unregister " + + "query '{}' at {}", + query.getQueryName(), rspEngine.getRegistrationUrl()); + } + } + + @Override + public synchronized void pauseRspEngineStreams() { + LOGGER.info("Pausing streams of RSP engine with base URL {}", rspEngine.getBaseUrl()); + rspEngineStatusHandler.pauseRspEngine(); + } + + @Override + public synchronized void restartRspEngineStreams() { + LOGGER.info("Restarting streams of RSP engine with base URL {}", rspEngine.getBaseUrl()); + rspEngineStatusHandler.restartRspEngine(); + } + + @Override + public void stopRspEngineStreamsUpdates() { + LOGGER.info("Stopping streams updates for RSP engine with base URL {}", rspEngine.getBaseUrl()); + rspEngineStatusHandler.stopAllTasks(); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspEngineHandlerFactory.java 
b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspEngineHandlerFactory.java new file mode 100644 index 0000000..7b1be59 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspEngineHandlerFactory.java @@ -0,0 +1,21 @@ +package be.ugent.idlab.divide.rsp; + +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; + +public class RspEngineHandlerFactory { + + /** + * Creates an {@link IRspEngineHandler} object for an RSP engine + * with the given query language and registration URL. + * @param rspQueryLanguage query language used by the RSP engine + * @param url base URL which will be used for communication with the RSP engine + * @return a new instance of {@link IRspEngineHandler} that acts as a handler of the RSP engine + * @throws DivideInvalidInputException when the query registration URL is no valid URL + */ + public static IRspEngineHandler createInstance(RspQueryLanguage rspQueryLanguage, + String url) + throws DivideInvalidInputException { + return new RspEngineHandler(rspQueryLanguage, url); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspEngineStatusHandler.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspEngineStatusHandler.java new file mode 100644 index 0000000..b5efacb --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspEngineStatusHandler.java @@ -0,0 +1,335 @@ +package be.ugent.idlab.divide.rsp; + +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.rsp.api.IRspEngineApiManager; +import be.ugent.idlab.divide.rsp.api.RspEngineApiNetworkException; +import be.ugent.idlab.divide.rsp.api.RspEngineApiResponseException; +import be.ugent.idlab.divide.rsp.engine.IRspEngine; +import be.ugent.idlab.divide.util.LogConstants; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import 
java.util.concurrent.LinkedBlockingQueue; + +public class RspEngineStatusHandler { + + private static final Logger LOGGER = + LoggerFactory.getLogger(RspEngineStatusHandler.class.getName()); + + enum RspEngineStatus { + PAUSED, + RUNNING, + RESTARTING_FAILED, + UNKNOWN + } + + private RspEngineStatus status; + private int additionalNumberOfPauseRequests; + + private final IRspEngine rspEngine; + private final IRspEngineApiManager rspEngineApiManager; + + private final LinkedBlockingQueue statusUpdateQueue; + private Thread statusUpdateThread; + + private final Object guard = new Object(); + + RspEngineStatusHandler(IRspEngine rspEngine, + IRspEngineApiManager rspEngineApiManager) { + this.rspEngine = rspEngine; + this.rspEngineApiManager = rspEngineApiManager; + + this.status = RspEngineStatus.RUNNING; + this.additionalNumberOfPauseRequests = 0; + + this.statusUpdateQueue = new LinkedBlockingQueue<>(); + this.statusUpdateThread = new Thread(this::processStatusUpdates); + this.statusUpdateThread.start(); + } + + public void pauseRspEngine() { + enqueueStatusUpdateTask(new PauseRspEngineStreamsTask()); + } + + public void restartRspEngine() { + enqueueStatusUpdateTask(new RestartRspEngineStreamsTask(0)); + } + + public void stopAllTasks() { + this.statusUpdateQueue.clear(); + this.statusUpdateThread.interrupt(); + this.statusUpdateThread = null; + } + + private void enqueueStatusUpdateTask(RspEngineStatusUpdateTask statusUpdateTask) { + LOGGER.info("Trying to enqueue status update task of type {} at RSP engine " + + "with base URL {}", statusUpdateTask.getClass(), rspEngine.getBaseUrl()); + synchronized (guard) { + LOGGER.info("Status of RSP engine with base URL {} before enqueueing: {}", + rspEngine.getBaseUrl(), status); + if (status == RspEngineStatus.RESTARTING_FAILED) { + LOGGER.info("Interrupting status update thread of RSP engine with base URL {} " + + "which is retrying failed restart", rspEngine.getBaseUrl()); + this.statusUpdateThread.interrupt(); + + // 
consider the status running from now on, since the restart retrial is + // is interrupted so no confirmation of the status is received from the engine + this.status = RspEngineStatus.UNKNOWN; + } + + // add new status to status update queue + LOGGER.info("ENQUEUEING status update task of type {} at RSP engine " + + "with base URL {}", statusUpdateTask.getClass(), rspEngine.getBaseUrl()); + this.statusUpdateQueue.add(statusUpdateTask); + } + } + + private void processStatusUpdates() { + try { + boolean interrupted = false; + while (!interrupted) { + // retrieve the updated context from the queue - blocks if the + // queue is empty until an item again enters the queue + RspEngineStatusUpdateTask statusUpdateTask = statusUpdateQueue.take(); + + // update the status of the engine + boolean interruptedDuringUpdate = executeStatusUpdateTask(statusUpdateTask); + + // check if thread has been interrupted during status update + interrupted = interruptedDuringUpdate || Thread.currentThread().isInterrupted(); + } + + // thread is interrupted explicitly by the system, probably because + // the component is unregistered + LOGGER.info("Status update thread for RSP engine with base URL {} is found interrupted after" + + " status update, so is stopping with the processing of the status update queue", + rspEngine.getBaseUrl()); + + } catch (InterruptedException e) { + LOGGER.info("Status update thread for RSP engine with base URL {} is interrupted while waiting," + + " so is stopping with the processing of the status update queue", + rspEngine.getBaseUrl()); + } + + // set status update thread to null so that the engine knows a new thread + // should be started upon arrival of a new status update request + // (after exiting this method, the thread status will become TERMINATED) + this.statusUpdateThread = null; + + // restart the status update thread + restartStatusUpdateThreadIfNeeded(); + } + + private boolean executeStatusUpdateTask(RspEngineStatusUpdateTask statusUpdateTask) { 
+ LOGGER.info("EXECUTING status update task of type {} at RSP engine " + + "with base URL {}", statusUpdateTask.getClass(), rspEngine.getBaseUrl()); + return statusUpdateTask.execute(); + } + + private void restartStatusUpdateThreadIfNeeded() { + // check if thread that is processing status updates is interrupted + // (in that case it will have been set to null), + // and if so, create & start new thread for this + if (this.statusUpdateThread == null) { + LOGGER.info("Status update thread for RSP engine with base URL {} has been " + + "interrupted, so a new thread is started", + rspEngine.getBaseUrl()); + + this.statusUpdateThread = new Thread(this::processStatusUpdates); + this.statusUpdateThread.start(); + } + } + + interface RspEngineStatusUpdateTask { + boolean execute(); + } + + class PauseRspEngineStreamsTask implements RspEngineStatusUpdateTask { + + private final Logger LOGGER = LoggerFactory.getLogger( + PauseRspEngineStreamsTask.class.getName()); + + @Override + public boolean execute() { + LOGGER.info("Status of RSP engine with base URL {} before executing PAUSE task: {}", + rspEngine.getBaseUrl(), status); + + if (status == RspEngineStatus.PAUSED) { + // the engine has been paused recently and has not been restarted yet + // -> this means a new context change has arrived before the change + // that actually paused the engine has completed + // -> the additional number of pause requests should be increased + additionalNumberOfPauseRequests++; + + } else { + // the engine is running, or restarting has been tried but has not + // succeeded yet, or the status is unknown + // -> in any case, it should be paused now + pauseRspEngine(); + } + + return false; + } + + private void pauseRspEngine() { + try { + // pause streams at RSP engine + rspEngineApiManager.pauseRspEngineStreams(); + + // if pausing fails, it should not be retried + + } catch (RspEngineApiNetworkException e) { + LOGGER.error("External network error when pausing streams for RSP engine " + + 
"with base URL {}", rspEngine.getBaseUrl()); + + } catch (RspEngineApiResponseException e) { + LOGGER.error("External server error when pausing streams for RSP engine " + + "with base URL {}", rspEngine.getBaseUrl(), e); + + } catch (DivideInvalidInputException e) { + // note: this will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal URL error within DIVIDE when trying to pause streams at for " + + "RSP engine with base URL {}", rspEngine.getBaseUrl()); + } + + synchronized (guard) { + // both when successful and when not, the status of this RSP engine is updated + // (if we end up here, the number of additional pause requests will always be 0) + status = RspEngineStatus.PAUSED; + } + } + + } + + class RestartRspEngineStreamsTask implements RspEngineStatusUpdateTask { + + private final Logger LOGGER = LoggerFactory.getLogger( + RestartRspEngineStreamsTask.class.getName()); + + private final long sleepingTime; + + public RestartRspEngineStreamsTask(long sleepingTime) { + this.sleepingTime = sleepingTime; + } + + @Override + public boolean execute() { + LOGGER.info("Status of RSP engine with base URL {} before executing RESTART task: {}", + rspEngine.getBaseUrl(), status); + + if (status == RspEngineStatus.RUNNING) { + LOGGER.info("Not restarting RSP engine with base URL {} since it is still running", + rspEngine.getBaseUrl()); + + // if engine is already running, there is no need to restart it + return false; + } + + if (status == RspEngineStatus.PAUSED && + additionalNumberOfPauseRequests > 0) { + LOGGER.info("Not restarting RSP engine with base URL {} since there are still {} " + + "additional pause requests", + rspEngine.getBaseUrl(), additionalNumberOfPauseRequests); + + // if the engine is paused and there have been additional pause requests, + // this means that this restart task is not allowed yet to actually restart + // the engine -> but the number of additional requests can be decreased + 
additionalNumberOfPauseRequests--; + return false; + } + + if (status == RspEngineStatus.RESTARTING_FAILED + && !statusUpdateQueue.isEmpty()) { + LOGGER.info("Not restarting RSP engine with base URL {} since this is a retrial and " + + "there are new status update requests in the queue", + rspEngine.getBaseUrl()); + + // if this is retrial of a failed restart, but there are other + // tasks in the queue by now, the retrial should not even be started + return false; + } + + // otherwise, the engine can be restarted + // (status is PAUSED and number of additional pause requests is 0) + return restartRspEngine(); + } + + private boolean restartRspEngine() { + try { + // sleep if needed (will the case for retrial tasks) + if (sleepingTime > 0) { + LOGGER.info("Sleeping for {} ms before restarting streams at RSP engine " + + "with base URL {}", sleepingTime, rspEngine.getBaseUrl()); + Thread.sleep(sleepingTime); + } + + // restart streams at RSP engine + rspEngineApiManager.restartRspEngineStreams(); + + synchronized (guard) { + // only if successful (i.e., if no exception is thrown), + // the status of this RSP engine is updated + status = RspEngineStatus.RUNNING; + } + + } catch (InterruptedException e) { + LOGGER.error("Interrupted while sleeping before retrying restarting " + + "RSP engine with base URL {}", rspEngine.getBaseUrl()); + + // return interruption + return true; + + } catch (RspEngineApiNetworkException e) { + LOGGER.error("External network error when restarting streams at " + + "RSP engine with base URL {}", rspEngine.getBaseUrl()); + + // retrying makes sense in this case + retryRestart(); + + } catch (RspEngineApiResponseException e) { + LOGGER.error("External server error when restarting streams at " + + "RSP engine with base URL {}", rspEngine.getBaseUrl()); + + } catch (DivideInvalidInputException e) { + // note: this will normally never occur + LOGGER.error(LogConstants.UNKNOWN_ERROR_MARKER, + "Internal URL error within DIVIDE when trying to 
restart streams " + + "RSP engine with base URL {}", rspEngine.getBaseUrl()); + } + + return false; + } + + private void retryRestart() { + synchronized (guard) { + // reschedule retrial if status update queue is currently empty + if (statusUpdateQueue.isEmpty()) { + // update status to RESTARTING_FAILED + status = RspEngineStatus.RESTARTING_FAILED; + + long newSleepingTime; + if (sleepingTime == 0) { + newSleepingTime = 5000; + } else { + newSleepingTime = sleepingTime * 2; + } + LOGGER.info("Enqueueing retrial of restarting streams at RSP engine " + + "with base URL {} (sleep time {} ms)", + rspEngine.getBaseUrl(), newSleepingTime); + statusUpdateQueue.add(new RestartRspEngineStreamsTask(newSleepingTime)); + } else { + // update status to UNKNOWN, since no restart request is fired + // (so the system has no clue what the status is at this point) + status = RspEngineStatus.UNKNOWN; + + LOGGER.info("Not enqueueing retrial of restarting streams at RSP engine " + + "with base URL {} since there are new status update tasks", + rspEngine.getBaseUrl()); + } + } + } + + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspQueryLanguage.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspQueryLanguage.java new file mode 100644 index 0000000..53397b1 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/RspQueryLanguage.java @@ -0,0 +1,26 @@ +package be.ugent.idlab.divide.rsp; + +/** + * RSP query languages that are supported by DIVIDE. + * Currently, DIVIDE supports RSP-QL ({@link #RSP_QL}) and C-SPARQL ({@link #CSPARQL}) queries. 
+ */ +public enum RspQueryLanguage { + + RSP_QL, + CSPARQL; + + /** + * @param name case insensitive name of RSP query language to retrieve + * @return {@link RspQueryLanguage} of which the name matches the given + * name (case insensitive); null if no match + */ + public static RspQueryLanguage fromString(String name) { + for (RspQueryLanguage language : RspQueryLanguage.values()) { + if (language.name().equalsIgnoreCase(name)) { + return language; + } + } + return null; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/IRspEngineApiManager.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/IRspEngineApiManager.java new file mode 100644 index 0000000..2d128c8 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/IRspEngineApiManager.java @@ -0,0 +1,82 @@ +package be.ugent.idlab.divide.rsp.api; + +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.rsp.query.IRspQuery; + +/** + * Class capable of registering queries to a wrapped registration URL, + * and unregistering queries from it. + */ +public interface IRspEngineApiManager { + + /** + * Registers a query to the RSP engine registration URL of this API manager. 
+ * + * @param query query to be registered + * @throws RspEngineApiNetworkException when a network error occurs during the registration + * of the query, causing the query to be not correctly + * registered at the RSP engine + * @throws RspEngineApiInputException when the query body cannot be properly encoded into a + * HTTP request for registration at the engine, causing the + * query to be not correctly registered at the RSP engine + * @throws RspEngineApiResponseException when unregistering the query at the RSP engine + * server fails (HTTP status code is not 2xx) + * @throws DivideInvalidInputException when the URL to which the query should be registered + * appears to be invalid and no request can therefore be made + */ + void registerQuery(IRspQuery query) throws + RspEngineApiNetworkException, + RspEngineApiInputException, + RspEngineApiResponseException, + DivideInvalidInputException; + + /** + * Unregisters a query via the RSP engine registration URL of this API manager. + * + * @param query query to be unregistered + * @throws RspEngineApiNetworkException when a network error occurs during unregistering + * the query, causing the query to be not correctly + * registered at the RSP engine + * @throws RspEngineApiResponseException when unregistering the query at the RSP engine + * server fails (HTTP status code is not 2xx) + * @throws DivideInvalidInputException when the URL at which the query should be unregistered + * appears to be invalid and no request can therefore be made + */ + void unregisterQuery(IRspQuery query) throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException; + + /** + * Pauses the streams of the RSP engine via the RSP engine streams URL of + * this query API manager. 
+ * + * @throws RspEngineApiNetworkException when a network error occurs during pausing the streams, + * causing the streams to be not correctly paused + * @throws RspEngineApiResponseException when pausing the streams at the RSP engine + * server fails (HTTP status code is not 2xx) + * @throws DivideInvalidInputException when the URL at which the streams should be paused + * appears to be invalid and no request can therefore be made + */ + void pauseRspEngineStreams() throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException; + + /** + * Restarts the streams of the RSP engine via the RSP engine streams URL of + * this query API manager. + * + * @throws RspEngineApiNetworkException when a network error occurs during restarting the streams, + * causing the streams to be not correctly restarted + * @throws RspEngineApiResponseException when restarting the streams at the RSP engine + * server fails (HTTP status code is not 2xx) + * @throws DivideInvalidInputException when the URL at which the streams should be restarted + * appears to be invalid and no request can therefore be made + */ + void restartRspEngineStreams() throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException; + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiException.java new file mode 100644 index 0000000..0aa7524 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiException.java @@ -0,0 +1,18 @@ +package be.ugent.idlab.divide.rsp.api; + +@SuppressWarnings("unused") +public abstract class RspEngineApiException extends Exception { + + public RspEngineApiException(String description, Exception base) { + super(description, base); + } + + public RspEngineApiException(String description) { + 
super(description); + } + + public RspEngineApiException(Exception base) { + super(base); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiInputException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiInputException.java new file mode 100644 index 0000000..b539ae9 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiInputException.java @@ -0,0 +1,18 @@ +package be.ugent.idlab.divide.rsp.api; + +@SuppressWarnings("unused") +public class RspEngineApiInputException extends RspEngineApiException { + + public RspEngineApiInputException(String description, Exception base) { + super(description, base); + } + + public RspEngineApiInputException(String description) { + super(description); + } + + public RspEngineApiInputException(Exception base) { + super(base); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiManager.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiManager.java new file mode 100644 index 0000000..0982bae --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiManager.java @@ -0,0 +1,182 @@ +package be.ugent.idlab.divide.rsp.api; + +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.rsp.engine.IRspEngine; +import be.ugent.idlab.divide.rsp.query.IRspQuery; +import be.ugent.idlab.util.http.HttpResponse; +import be.ugent.idlab.util.http.HttpUtilities; +import org.apache.http.client.ClientProtocolException; +import org.apache.http.conn.HttpHostConnectException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import 
java.net.URL; + +class RspEngineApiManager implements IRspEngineApiManager { + + private static final Logger LOGGER = LoggerFactory.getLogger(RspEngineApiManager.class.getName()); + + private final String registrationUrl; + private final String streamsUrl; + + RspEngineApiManager(IRspEngine rspEngine) throws DivideInvalidInputException { + // first try to convert the base URL string to a URL and URI object + // -> is required to perform the registration of queries and streams status + // changes to (a subpath of) this URL later on + // -> if this fails, this means that this URL string is invalid + try { + (new URL(rspEngine.getBaseUrl())).toURI(); + } catch (URISyntaxException | MalformedURLException e) { + throw new DivideInvalidInputException( + "RSP engine URL is invalid"); + } + + this.registrationUrl = rspEngine.getRegistrationUrl(); + this.streamsUrl = rspEngine.getStreamsUrl(); + } + + @Override + public void unregisterQuery(IRspQuery query) throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException { + String url = String.format("%s/%s", this.registrationUrl, query.getQueryName()); + + LOGGER.info("Unregistering query with name '" + query.getQueryName() + "' at " + url); + + try { + HttpResponse httpResponse = HttpUtilities.delete(url); + + int statusCode = httpResponse.getStatusCode(); + if (statusCode >= 300) { + throw new RspEngineApiResponseException(String.format("RSP engine server " + + "responded with status code %d and error message: %s", + statusCode, httpResponse.getBody())); + } + + } catch (HttpHostConnectException | ClientProtocolException e) { + String description = String.format("Could not unregister query at %s because " + + "of connection issue", url); + LOGGER.error(description, e); + throw new RspEngineApiNetworkException(description, e); + + } catch (URISyntaxException | MalformedURLException e) { + // this can normally not happen, since the URI is input-validated upon the + // creation of this 
object + String description = String.format("Could not unregister query at %s because " + + "this URL is invalid", url); + LOGGER.error(description, e); + throw new DivideInvalidInputException(description, e); + + } catch (IOException e) { + String description = String.format("Could not unregister query at %s", url); + LOGGER.error(description, e); + throw new RspEngineApiNetworkException(description, e); + } + } + + @Override + public void registerQuery(IRspQuery query) throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + RspEngineApiInputException, + DivideInvalidInputException { + String url = String.format("%s/%s", this.registrationUrl, query.getQueryName()); + + LOGGER.info("Registering query with name '" + query.getQueryName() + "' at " + url); + + try { + HttpResponse httpResponse = HttpUtilities.put(url, query.getQueryBody()); + + int statusCode = httpResponse.getStatusCode(); + if (statusCode >= 300) { + throw new RspEngineApiResponseException(String.format("RSP engine server " + + "responded with status code %d and error message: %s", + statusCode, httpResponse.getBody())); + } + + } catch (HttpHostConnectException | ClientProtocolException e) { + String description = String.format("Could not register query to %s because " + + "of connection issue", url); + LOGGER.error(description, e); + throw new RspEngineApiNetworkException(description, e); + + } catch (UnsupportedEncodingException e) { + String description = String.format("Could not register query to %s because " + + "HTTP request body (= query body) is invalid", url); + LOGGER.error(description, e); + throw new RspEngineApiInputException(description, e); + + } catch (URISyntaxException | MalformedURLException e) { + // this can normally not happen, since the URI is input-validated upon the + // creation of this object + String description = String.format("Could not register query to %s because " + + "this URL is invalid", url); + LOGGER.error(description, e); + throw new 
DivideInvalidInputException(description, e); + + } catch (IOException e) { + String description = String.format("Could not register query to %s", url); + LOGGER.error(description, e); + throw new RspEngineApiNetworkException(description, e); + } + } + + @Override + public void pauseRspEngineStreams() throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException { + LOGGER.info("Pausing streams at " + streamsUrl); + updateRspEngineStreamsStatus("pause"); + } + + @Override + public void restartRspEngineStreams() throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException { + LOGGER.info("Restarting streams at " + streamsUrl); + updateRspEngineStreamsStatus("restart"); + } + + private void updateRspEngineStreamsStatus(String action) throws + RspEngineApiNetworkException, + RspEngineApiResponseException, + DivideInvalidInputException { + try { + HttpResponse httpResponse = HttpUtilities.post(streamsUrl, "action=" + action); + + int statusCode = httpResponse.getStatusCode(); + if (statusCode >= 300) { + throw new RspEngineApiResponseException(String.format("RSP engine server " + + "responded with status code %d and error message: %s", + statusCode, httpResponse.getBody())); + } + + } catch (HttpHostConnectException | ClientProtocolException e) { + String description = String.format("Could not %s streams at %s because " + + "of connection issue", action, streamsUrl); + LOGGER.error(description, e); + throw new RspEngineApiNetworkException(description, e); + + } catch (URISyntaxException | MalformedURLException e) { + // this can normally not happen, since the URI is input-validated upon the + // creation of this object + String description = String.format("Could not %s streams at %s because " + + "this URL is invalid", action, streamsUrl); + LOGGER.error(description, e); + throw new DivideInvalidInputException(description, e); + + } catch (IOException e) { + String description = 
String.format("Could not %s streams at %s", action, streamsUrl); + LOGGER.error(description, e); + throw new RspEngineApiNetworkException(description, e); + } + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiManagerFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiManagerFactory.java new file mode 100644 index 0000000..9fc8843 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiManagerFactory.java @@ -0,0 +1,19 @@ +package be.ugent.idlab.divide.rsp.api; + +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.rsp.engine.IRspEngine; + +public class RspEngineApiManagerFactory { + + /** + * Creates an {@link IRspEngineApiManager} for the given RSP engine + * @param rspEngine RSP engine for which this API manager exists + * @return an {@link IRspEngineApiManager} for the given RSP engine + * @throws DivideInvalidInputException when the RSP engine has an invalid base URL + */ + public static IRspEngineApiManager createInstance(IRspEngine rspEngine) + throws DivideInvalidInputException { + return new RspEngineApiManager(rspEngine); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiNetworkException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiNetworkException.java new file mode 100644 index 0000000..8e137d1 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiNetworkException.java @@ -0,0 +1,18 @@ +package be.ugent.idlab.divide.rsp.api; + +@SuppressWarnings("unused") +public class RspEngineApiNetworkException extends RspEngineApiException { + + public RspEngineApiNetworkException(String description, Exception base) { + super(description, base); + } + + public 
RspEngineApiNetworkException(String description) { + super(description); + } + + public RspEngineApiNetworkException(Exception base) { + super(base); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiResponseException.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiResponseException.java new file mode 100644 index 0000000..cef62b3 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/api/RspEngineApiResponseException.java @@ -0,0 +1,18 @@ +package be.ugent.idlab.divide.rsp.api; + +@SuppressWarnings("unused") +public class RspEngineApiResponseException extends RspEngineApiException { + + public RspEngineApiResponseException(String description, Exception base) { + super(description, base); + } + + public RspEngineApiResponseException(String description) { + super(description); + } + + public RspEngineApiResponseException(Exception base) { + super(base); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/IRspEngine.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/IRspEngine.java new file mode 100644 index 0000000..a795ba7 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/IRspEngine.java @@ -0,0 +1,63 @@ +package be.ugent.idlab.divide.rsp.engine; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; +import be.ugent.idlab.divide.rsp.query.IRspQuery; + +import java.util.List; + +/** + * Representation of an RSP engine. It has a query language, registration URL, + * and a set of registered RSP queries ({@link IRspQuery} instances). + */ +public interface IRspEngine { + + /** + * @return the query language used by this RSP engine + */ + RspQueryLanguage getRspQueryLanguage(); + + /** + * @return the base URL of this RSP engine (e.g. 
used as base URL for constructing + * the URL to which queries should be registered to this RSP engine and + * unregistered from it) + */ + String getBaseUrl(); + + /** + * @return the URL used for registering queries to this RSP engine + * and unregistering queries from it + */ + String getRegistrationUrl(); + + /** + * @return the URL used for updating the status of the streams of this + * RSP engine (i.e. to update whether data is fed to the streams, + * or whether this process is paused) + */ + String getStreamsUrl(); + + /** + * @return blueprint of queries that are currently actually registered + * at this RSP engine + */ + List getRegisteredQueries(); + + /** + * Updates the list of registered queries at this RSP engine + * by adding a new query. + * If the query is already present in the list, nothing happens. + * + * @param query query to be added to the list of registered queries + */ + void addRegisteredQuery(IRspQuery query); + + /** + * Updates the list of registered queries at this RSP engine + * by removing a query. + * If the query is not present in the list, nothing happens. 
+ * + * @param query query to be removed to the list of registered queries + */ + void removeRegisteredQuery(IRspQuery query); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/RspEngine.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/RspEngine.java new file mode 100644 index 0000000..5e8818e --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/RspEngine.java @@ -0,0 +1,62 @@ +package be.ugent.idlab.divide.rsp.engine; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; +import be.ugent.idlab.divide.rsp.query.IRspQuery; + +import java.util.ArrayList; +import java.util.List; + +public class RspEngine implements IRspEngine { + + private final RspQueryLanguage rspQueryLanguage; + private final String baseUrl; + private final String registrationUrl; + private final String streamsUrl; + private final List registeredQueries; + + public RspEngine(RspQueryLanguage rspQueryLanguage, String url) { + this.rspQueryLanguage = rspQueryLanguage; + this.baseUrl = url; + String formattedBaseUrl = baseUrl.endsWith("/") ? 
+ baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.registrationUrl = String.format("%s/queries", formattedBaseUrl); + this.streamsUrl = String.format("%s/streams", formattedBaseUrl); + this.registeredQueries = new ArrayList<>(); + } + + @Override + public synchronized RspQueryLanguage getRspQueryLanguage() { + return rspQueryLanguage; + } + + @Override + public String getBaseUrl() { + return baseUrl; + } + + @Override + public synchronized String getRegistrationUrl() { + return registrationUrl; + } + + @Override + public String getStreamsUrl() { + return streamsUrl; + } + + @Override + public synchronized List getRegisteredQueries() { + return registeredQueries; + } + + @Override + public synchronized void addRegisteredQuery(IRspQuery query) { + registeredQueries.add(query); + } + + @Override + public synchronized void removeRegisteredQuery(IRspQuery query) { + registeredQueries.remove(query); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/RspEngineFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/RspEngineFactory.java new file mode 100644 index 0000000..043d8de --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/engine/RspEngineFactory.java @@ -0,0 +1,19 @@ +package be.ugent.idlab.divide.rsp.engine; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; + +public class RspEngineFactory { + + /** + * Creates and returns a new RSP engine with the given parameters. 
+ * + * @param rspQueryLanguage query language used by the new RSP engine + * @param url base URL for communication with the new RSP engine + * @return newly created RSP engine + */ + public static IRspEngine createInstance(RspQueryLanguage rspQueryLanguage, + String url) { + return new RspEngine(rspQueryLanguage, url); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/IRspQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/IRspQuery.java new file mode 100644 index 0000000..8ce1b30 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/IRspQuery.java @@ -0,0 +1,25 @@ +package be.ugent.idlab.divide.rsp.query; + +import be.ugent.idlab.divide.core.query.IDivideQuery; + +/** + * Representation of an RSP query, which has a name and a body. + */ +public interface IRspQuery { + + /** + * @return name of RSP query + */ + String getQueryName(); + + /** + * @return body of RSP query + */ + String getQueryBody(); + + /** + * @return a reference to the DIVIDE query that was instantiated into this RSP query + */ + IDivideQuery getOriginalDivideQuery(); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/RspQuery.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/RspQuery.java new file mode 100644 index 0000000..18b8593 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/RspQuery.java @@ -0,0 +1,61 @@ +package be.ugent.idlab.divide.rsp.query; + +import be.ugent.idlab.divide.core.query.IDivideQuery; + +import java.util.Objects; + +public class RspQuery implements IRspQuery { + + private final String queryName; + private final String queryBody; + private final String rspQLQueryBody; + private final IDivideQuery divideQuery; + + public RspQuery(String queryName, + String queryBody, + String rspQLQueryBody, + IDivideQuery 
divideQuery) { + this.queryName = queryName; + this.queryBody = queryBody; + this.rspQLQueryBody = rspQLQueryBody; + this.divideQuery = divideQuery; + } + + @Override + public String getQueryName() { + return queryName; + } + + @Override + public String getQueryBody() { + return queryBody; + } + + @Override + public IDivideQuery getOriginalDivideQuery() { + return divideQuery; + } + + // IMPORTANT: equality of RSP queries is defined by their RSP-QL body, + // and NOT by their name or translated body (which might + // contain the name)! + // (since body comparison is done to determine whether + // a query is already registered on an RSP engine or not) + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + RspQuery rspQuery = (RspQuery) o; + return rspQLQueryBody.equals(rspQuery.rspQLQueryBody); + } + + @Override + public int hashCode() { + return Objects.hash(queryBody); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/RspQueryFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/RspQueryFactory.java new file mode 100644 index 0000000..b1c0bdf --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/query/RspQueryFactory.java @@ -0,0 +1,23 @@ +package be.ugent.idlab.divide.rsp.query; + +import be.ugent.idlab.divide.core.query.IDivideQuery; + +public class RspQueryFactory { + + /** + * Creates and returns a new RSP query with the given parameters. 
+ * + * @param queryName name of the new RSP query + * @param queryBody body of the new RSP query + * @param rspQLQueryBody body of the new RSP query in RSP-QL format + * @param divideQuery the DIVIDE query that was instantiated into the new RSP query + * @return newly created RSP query + */ + public static IRspQuery createInstance(String queryName, + String queryBody, + String rspQLQueryBody, + IDivideQuery divideQuery) { + return new RspQuery(queryName, queryBody, rspQLQueryBody, divideQuery); + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/CSparqlQueryTranslator.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/CSparqlQueryTranslator.java new file mode 100644 index 0000000..54229fa --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/CSparqlQueryTranslator.java @@ -0,0 +1,110 @@ +package be.ugent.idlab.divide.rsp.translate; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Class capable of translating an RSP-QL query to C-SPARQL format. 
+ */ +public class CSparqlQueryTranslator implements IQueryTranslator { + + private static final Logger LOGGER = + LoggerFactory.getLogger(CSparqlQueryTranslator.class.getName()); + + private static final Pattern STREAM_PATTERN = Pattern.compile( + "FROM\\s+NAMED\\s+WINDOW\\s+\\S+\\s+ON\\s+(\\S+)\\s+(\\[[^\\[\\]]+])", + Pattern.CASE_INSENSITIVE); + + private static final Pattern WHERE_CLAUSE_PATTERN = Pattern.compile( + "WHERE\\s+\\{[\\s\\S]*}", + Pattern.CASE_INSENSITIVE); + + private static final Pattern WINDOW_START_PATTERN = Pattern.compile( + "WINDOW\\s+\\S+\\s+\\{", + Pattern.CASE_INSENSITIVE); + + @Override + public String translateQuery(String queryBody, String queryName) { + LOGGER.info("Translating query '{}' to C-SPARQL syntax", queryName); + + String result = queryBody; + + // translate stream declarations + Matcher m1 = STREAM_PATTERN.matcher(queryBody); + while (m1.find()) { + String streamPart = m1.group(0); + String streamName = m1.group(1); + String streamParameters = m1.group(2).toLowerCase() + .replace("pt", "") + .replace("range", "RANGE") + .replace("tumbling", "TUMBLING") + .replace("from", "FROM") + .replace("now", "NOW") + .replace("to", "TO") + .replace("slide", "STEP") + .replace("step", "STEP"); + result = result.replace(streamPart, + String.format("FROM STREAM %s %s", streamName, streamParameters)); + } + + // aggregate all windows in WHERE clause + Matcher m2 = WHERE_CLAUSE_PATTERN.matcher(queryBody); + if (m2.find()) { + String whereClause = m2.group(0); + + Matcher m3 = WINDOW_START_PATTERN.matcher(queryBody); + + List indicesToRemove = new ArrayList<>(); + + while (m3.find()) { + + String windowStart = m3.group(0); + int windowStartIndex = whereClause.indexOf(windowStart) + windowStart.length(); + + for (int i = whereClause.indexOf(windowStart); i < windowStartIndex; i++) { + indicesToRemove.add(i); + } + + int braceLevels = 1; + for (int i = windowStartIndex; i < whereClause.length(); i++) { + char c = whereClause.charAt(i); + if (c 
== '{') { + braceLevels++; + } else if (c == '}') { + braceLevels--; + if (braceLevels == 0) { + + indicesToRemove.add(i); + + break; + } + } + } + } + + StringBuilder newWhereClause = new StringBuilder(); + + indicesToRemove.sort(Integer::compareTo); + + int previousIndex = 0; + for (Integer indexToRemove : indicesToRemove) { + newWhereClause.append(whereClause, previousIndex, indexToRemove); + previousIndex = indexToRemove + 1; + } + newWhereClause.append(whereClause, previousIndex, whereClause.length()); + + result = result.replace(whereClause, newWhereClause.toString()); + } + + // add registration clause + result = "REGISTER QUERY " + queryName + " AS \n" + result; + + return result; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/IQueryTranslator.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/IQueryTranslator.java new file mode 100644 index 0000000..f4b8ba0 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/IQueryTranslator.java @@ -0,0 +1,21 @@ +package be.ugent.idlab.divide.rsp.translate; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; + +/** + * Translator capable of translating an RSP-QL query to a specific + * {@link RspQueryLanguage}. + */ +public interface IQueryTranslator { + + /** + * Translates an RSP-QL query with the given query body and query name + * to the {@link RspQueryLanguage} of this translator. 
+ * + * @param queryBody body of the query to be translated + * @param queryName name of the query to be translated + * @return query body of the translated query + */ + String translateQuery(String queryBody, String queryName); + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/QueryTranslatorFactory.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/QueryTranslatorFactory.java new file mode 100644 index 0000000..e68eb81 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/QueryTranslatorFactory.java @@ -0,0 +1,28 @@ +package be.ugent.idlab.divide.rsp.translate; + +import be.ugent.idlab.divide.rsp.RspQueryLanguage; + +public class QueryTranslatorFactory { + + /** + * Returns an {@link IQueryTranslator} which can translate an RSP-QL query string + * (= internal DIVIDE representation) to the specified RSP query language + * @param rspQueryLanguage query language to which the query translator should + * translate the RSP-QL queries + * @return {@link IQueryTranslator} which can translate an RSP-QL query string + * to the specified RSP query language + */ + public static IQueryTranslator createInstance(RspQueryLanguage rspQueryLanguage) { + switch(rspQueryLanguage) { + case CSPARQL: + return new CSparqlQueryTranslator(); + + case RSP_QL: + return new RspQLQueryTranslator(); + + default: + throw new IllegalArgumentException("No valid RSP query language given"); + } + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/RspQLQueryTranslator.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/RspQLQueryTranslator.java new file mode 100644 index 0000000..10b345d --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/rsp/translate/RspQLQueryTranslator.java @@ -0,0 +1,18 @@ +package 
be.ugent.idlab.divide.rsp.translate; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class RspQLQueryTranslator implements IQueryTranslator { + + private static final Logger LOGGER = LoggerFactory.getLogger(RspQLQueryTranslator.class.getName()); + + @Override + public String translateQuery(String queryBody, String queryName) { + LOGGER.info("Translating query '{}' to RSP-QL syntax (= leaving unchanged)", queryName); + + // RSP-QL is the default format used by DIVIDE, so no translation required + return queryBody; + } + +} diff --git a/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/util/LogConstants.java b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/util/LogConstants.java new file mode 100644 index 0000000..e07d07f --- /dev/null +++ b/src/divide-central/divide-engine/src/main/java/be/ugent/idlab/divide/util/LogConstants.java @@ -0,0 +1,13 @@ +package be.ugent.idlab.divide.util; + +import org.slf4j.Marker; +import org.slf4j.MarkerFactory; + +public class LogConstants { + + // LOGGING MARKERS + + public static final Marker UNKNOWN_ERROR_MARKER = MarkerFactory.getMarker("[UNKNOWN_ERROR]"); + public static final Marker METRIC_MARKER = MarkerFactory.getMarker("[METRIC]"); + +} diff --git a/src/divide-central/divide-engine/src/main/resources/log4j2.xml b/src/divide-central/divide-engine/src/main/resources/log4j2.xml new file mode 100644 index 0000000..4c73c81 --- /dev/null +++ b/src/divide-central/divide-engine/src/main/resources/log4j2.xml @@ -0,0 +1,69 @@ + + + + %d{ISO8601} [%t] %-5level %logger{36} - %msg%n + %d{ISO8601}\t[%t]\t%-5level\t%logger{36}\t%marker\t%msg%n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/divide-central/divide-eye/pom.xml b/src/divide-central/divide-eye/pom.xml new file mode 100644 index 0000000..77d27aa --- /dev/null +++ b/src/divide-central/divide-eye/pom.xml @@ -0,0 +1,22 @@ + + + + 
divide + be.ugent.idlab + 1.0 + + 4.0.0 + + divide-eye + + + + + be.ugent.idlab + divide-engine + 1.0 + + + \ No newline at end of file diff --git a/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQuery.java b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQuery.java new file mode 100644 index 0000000..8911cb0 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQuery.java @@ -0,0 +1,49 @@ +package be.ugent.idlab.divide.queryderivation.eye; + +import java.util.ArrayList; +import java.util.List; + +/** + * Simple class containing the EYE representation of a registered DIVIDE query. + * It contains a reference to the N3 files containing the query pattern, the + * sensor query rule, and the goal. + */ +class EyeDivideQuery { + + private final String queryPatternFilePath; + private final String sensorQueryFilePath; + private final String goalFilePath; + + private final List contextEnrichingQueryFilePaths; + + EyeDivideQuery(String queryPatternFilePath, + String sensorQueryFilePath, + String goalFilePath) { + this.queryPatternFilePath = queryPatternFilePath; + this.sensorQueryFilePath = sensorQueryFilePath; + this.goalFilePath = goalFilePath; + + this.contextEnrichingQueryFilePaths = new ArrayList<>(); + } + + String getQueryPatternFilePath() { + return queryPatternFilePath; + } + + String getSensorQueryFilePath() { + return sensorQueryFilePath; + } + + String getGoalFilePath() { + return goalFilePath; + } + + List getContextEnrichingQueryFilePaths() { + return contextEnrichingQueryFilePaths; + } + + void addContextEnrichingQueryFilePath(String path) { + contextEnrichingQueryFilePaths.add(path); + } + +} diff --git a/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryConverter.java 
// (index 0000000..70d73c6) ----
package be.ugent.idlab.divide.queryderivation.eye;

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.NodeIterator;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.impl.PropertyImpl;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Converts the Jena model output of the EYE query derivation into the list
 * of instantiated RSP-QL query strings, prepending the declared prefixes.
 * (Generic type parameters restored: they were stripped by extraction.)
 */
class EyeDivideQueryConverter {

    private final EyeDivideQueryDeriver queryDeriver;

    private final Property queryBodyProperty, prefixesProperty,
            declareProperty, prefixProperty, namespaceProperty;

    EyeDivideQueryConverter(EyeDivideQueryDeriver queryDeriver) {
        // save query deriver (used as a cache for converted prefix strings)
        this.queryDeriver = queryDeriver;

        // define property which has the instantiated query bodies as triple object
        this.queryBodyProperty = new PropertyImpl("http://idlab.ugent.be/sensdesc#queryBody");

        // define properties related to the extraction of the query prefixes
        // (note that, since the getQueries method is only called for the output of a
        // single DIVIDE query, there is always only one set of prefixes defined)
        this.prefixesProperty = new PropertyImpl("http://www.w3.org/ns/shacl#prefixes");
        this.declareProperty = new PropertyImpl("http://www.w3.org/ns/shacl#declare");
        this.prefixProperty = new PropertyImpl("http://www.w3.org/ns/shacl#prefix");
        this.namespaceProperty = new PropertyImpl("http://www.w3.org/ns/shacl#namespace");
    }

    /**
     * Extracts all instantiated queries from the given model.
     *
     * @param model Jena model output of the EYE query derivation
     * @return list of complete RSP-QL query strings (prefixes + body)
     */
    List<String> getQueries(Model model) {
        // iterate over all resources in Jena model that have the query body property
        ResIterator iterator = model.listResourcesWithProperty(queryBodyProperty);

        // define list of queries
        List<String> queries = new ArrayList<>();

        while (iterator.hasNext()) {
            // retrieve each resource (object) that has the query body property,
            // and retrieve the corresponding statement in the query string
            Resource resource = iterator.next();
            Statement queryStatement = model.getProperty(resource, queryBodyProperty);

            // get the object of the statement, which is the query body as string literal
            // -> retrieve this query body
            // NOTE(review): Literal.toString() is used here; for typed literals that
            // would include a datatype suffix — presumably these are plain string
            // literals, so this is equivalent to getString(). Confirm before changing.
            String query = queryStatement.getObject().asLiteral().toString().trim();

            // retrieve the RSP-QL prefix string to put in front of the query body,
            // to obtain the final RSP-QL query
            query = getPrefixString(model, resource) + query;

            // replace \" by " (this is required for string literals occurring in
            // the query body)
            query = query.replace("\\\"", "\"");

            queries.add(query);
        }

        return queries;
    }

    /**
     * Returns the prefix declaration string for the given query resource,
     * using the deriver's cache keyed on the prefixes entity URI.
     */
    private String getPrefixString(Model model, Resource resource) {
        // retrieve statement defining the prefix entity of the given query resource
        Statement prefixStatement = model.getProperty(resource, prefixesProperty);
        String prefixesURI = prefixStatement.getObject().toString();

        // check if prefixes URI has already been converted, and only recalculate
        // if this is not the case
        String converted = queryDeriver.retrieveConvertedPrefixesString(prefixesURI);
        if (converted == null) {
            converted = convertPrefixString(model, prefixStatement);
            queryDeriver.saveConvertedPrefixesString(prefixesURI, converted);
        }

        return converted;
    }

    /**
     * Builds the sorted "PREFIX name: <uri>" declaration block from the
     * SHACL prefix declarations attached to the given prefixes statement.
     */
    private String convertPrefixString(Model model, Statement prefixStatement) {
        // create iterator over all prefixes declared by the prefix entity
        // of the given query resource
        NodeIterator prefixIterator = model.listObjectsOfProperty(
                prefixStatement.getObject().asResource(), declareProperty);

        StringBuilder prefixesString = new StringBuilder();
        List<String> prefixStrings = new ArrayList<>();
        while (prefixIterator.hasNext()) {
            RDFNode node = prefixIterator.next();

            // for each declared prefix, get the object of both the prefix property
            // (a string literal) and the namespace property (a literal of type xsd:anyURI)
            String prefixName = model.getProperty(node.asResource(), prefixProperty).
                    getObject().toString();
            String prefixURI = model.getProperty(node.asResource(), namespaceProperty).
                    getObject().asLiteral().getValue().toString();

            // create valid RSP-QL string that defines the extracted prefix
            String prefixString = "PREFIX " + prefixName + ": <" + prefixURI + ">\n";

            prefixStrings.add(prefixString);
        }

        // sort array of prefixes to make sure semantically equivalent queries also
        // have an equivalent prefix string
        Collections.sort(prefixStrings);

        // add strings to string builder
        for (String prefixString : prefixStrings) {
            prefixesString.append(prefixString);
        }

        return prefixesString.toString();
    }

}
// ---- (diff residue) new file: src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriver.java (index 0000000..fc41d13) ----
package be.ugent.idlab.divide.queryderivation.eye;

import be.ugent.idlab.divide.core.context.Context;
import be.ugent.idlab.divide.core.context.ContextEnrichingQuery;
import be.ugent.idlab.divide.core.engine.IDivideQueryDeriver;
import be.ugent.idlab.divide.core.engine.IDivideQueryDeriverResult;
import be.ugent.idlab.divide.core.exception.DivideInitializationException;
import be.ugent.idlab.divide.core.exception.DivideInvalidInputException;
import be.ugent.idlab.divide.core.exception.DivideNotInitializedException;
import
be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.IDivideQueryParser; +import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException; +import be.ugent.idlab.divide.core.query.parser.ParsedSparqlQuery; +import be.ugent.idlab.divide.core.query.parser.Prefix; +import be.ugent.idlab.divide.core.query.parser.SplitSparqlQuery; +import be.ugent.idlab.divide.util.LogConstants; +import be.ugent.idlab.util.bash.BashException; +import be.ugent.idlab.util.eye.EyeReasoner; +import be.ugent.idlab.util.io.IOUtilities; +import be.ugent.idlab.util.rdf.RDFLanguage; +import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaUtilities; +import org.apache.commons.io.FileUtils; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.update.UpdateAction; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.regex.Pattern; + +@SuppressWarnings({"FieldCanBeLocal", "ResultOfMethodCallIgnored"}) +class EyeDivideQueryDeriver implements IDivideQueryDeriver { + + private static final Logger LOGGER = LoggerFactory.getLogger(EyeDivideQueryDeriver.class.getName()); + + /** + * Hidden directory where EYE stores all its files related to the + * DIVIDE query derivation + */ + private static final String DIVIDE_DIRECTORY = ".divide"; + + /** + * Path of JAR resource for generating all triples, used during the 
EYE preprocessing + */ + private static final String PREPROCESSING_INSTANTIATE_TRIPLES_RESOURCE = + Paths.get("eye", "n3", "preprocessing", "instantiate-triples.n3").toString(); + + /** + * Path of JAR resource for instantiating rules, used during the EYE preprocessing + */ + private static final String PREPROCESSING_INSTANTIATE_RULES_RESOURCE = + Paths.get("eye", "n3", "preprocessing", "instantiate-rules.n3").toString(); + + /** + * Path of JAR resource for processing lists, used during the EYE preprocessing + */ + private static final String PREPROCESSING_LISTS_RESOURCE = + Paths.get("eye", "n3", "preprocessing", "lists.n3").toString(); + + /** + * Path of JAR resource describing the query extraction goal, used during the + * EYE query derivation + */ + private static final String QUERY_EXTRACTION_GOAL_RESOURCE = + Paths.get("eye", "n3", "query-derivation", "query-extraction-goal.n3").toString(); + + /** + * Path of JAR resource describing the window parameter extraction goal, used during the + * EYE query derivation + */ + private static final String WINDOW_PARAMETER_EXTRACTION_GOAL_RESOURCE = + Paths.get("eye", "n3", "query-derivation", "window-parameter-extraction-goal.n3").toString(); + + /** + * Path of JAR resource describing the goal for substituting the input variables + * into the query body, used during the EYE query derivation + */ + private static final String QUERY_INPUT_VARIABLE_SUBSTITUTION_GOAL_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-input-variable-substitution-goal.n3").toString(); + + /** + * Path of JAR resource describing rules for substituting the input variables + * into the query body, used during the EYE query derivation + */ + private static final String QUERY_INPUT_VARIABLE_SUBSTITUTION_RULES_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-input-variable-substitution-rules.n3").toString(); + + /** + * Path of JAR resource describing the supported datatypes for substituting the + * input variables 
into the query body, used during the EYE query derivation + */ + private static final String QUERY_INPUT_VARIABLE_SUBSTITUTION_SUPPORTED_DATATYPES_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-input-variable-substitution-supported-datatypes.n3").toString(); + + /** + * Path of JAR resource describing the goal for substituting the dynamic window + * parameters into the query body, used during the EYE query derivation + */ + private static final String QUERY_DYNAMIC_WINDOW_PARAMETER_SUBSTITUTION_GOAL_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-dynamic-window-parameter-substitution-goal.n3").toString(); + + /** + * Path of JAR resource describing the rules for substituting the dynamic window + * parameters into the query body, used during the EYE query derivation + */ + private static final String QUERY_DYNAMIC_WINDOW_PARAMETER_SUBSTITUTION_RULES_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-dynamic-window-parameter-substitution-rules.n3").toString(); + + /** + * Path of JAR resource describing the goal for substituting the static window + * parameters into the query body, used during the EYE query derivation + */ + private static final String QUERY_STATIC_WINDOW_PARAMETER_SUBSTITUTION_GOAL_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-static-window-parameter-substitution-goal.n3").toString(); + + /** + * Path of JAR resource describing the rules for substituting the static window + * parameters into the query body, used during the EYE query derivation + */ + private static final String QUERY_STATIC_WINDOW_PARAMETER_SUBSTITUTION_RULES_RESOURCE = + Paths.get("eye", "n3", "query-derivation", + "query-static-window-parameter-substitution-rules.n3").toString(); + + /** + * Path of JAR resource describing a context change as trigger for the + * EYE query derivation + */ + private static final String QUERY_DERIVATION_TRIGGER_CONTEXT_CHANGE_RESOURCE = + Paths.get("eye", "n3", "query-derivation", 
"trigger", + "trigger-context-change.n3").toString(); + + /** + * Path of JAR resource describing the monitor as trigger for the + * EYE query derivation + */ + private static final String QUERY_DERIVATION_TRIGGER_MONITOR_RESOURCE = + Paths.get("eye", "n3", "query-derivation", "trigger", + "trigger-monitor.n3").toString(); + + /** + * Template of path of query pattern file for a DIVIDE query + * (still to be instantiated with concrete query name) + */ + private static final String EYE_DIVIDE_QUERY_QUERY_PATTERN_PATH_TEMPLATE = + Paths.get("eye", "queries", "%s", "query-pattern.n3").toString(); + + /** + * Template of path of sensor query rule file for a DIVIDE query + * (still to be instantiated with concrete query name) + */ + private static final String EYE_DIVIDE_QUERY_SENSOR_QUERY_RULE_PATH_TEMPLATE = + Paths.get("eye", "queries", "%s", "sensor-query-rule.n3").toString(); + + /** + * Template of path of goal file for a DIVIDE query + * (still to be instantiated with concrete query name) + */ + private static final String EYE_DIVIDE_QUERY_GOAL_PATH_TEMPLATE = + Paths.get("eye", "queries", "%s", "goal.n3").toString(); + + /** + * Template of path of file for a context-enriching query of a DIVIDE query + * written as an EYE rule (still to be instantiated with concrete query name + * and context-enriching query number) + */ + private static final String EYE_DIVIDE_QUERY_CONTEXT_ENRICHING_QUERY_PATH_TEMPLATE = + Paths.get("eye", "queries", "%s", "context-enriching-query-rule-%d.n3").toString(); + + /** + * Date formatter used to create directories to store the timestamped + * results of the EYE query derivation + */ + private static final SimpleDateFormat formatter = + new SimpleDateFormat("yyyyMMdd_HHmmss"); + + /** + * Boolean representing whether the ontology has already been successfully loaded + */ + private boolean ontologyLoaded; + + // DIVIDE files + private final String preprocessingInstantiateTriplesFile; + private final String 
preprocessingInstantiateRulesFile; + private final String preprocessingListsFile; + private final String queryExtractionGoalFile; + private final String windowParameterExtractionGoalFile; + private final String queryInputVariableSubstitutionGoalFile; + private final String queryInputVariableSubstitutionRulesFile; + private final String queryInputVariableSubstitutionSupportedDatatypesFile; + private final String queryDynamicWindowParameterSubstitutionGoalFile; + private final String queryDynamicWindowParameterSubstitutionRulesFile; + private final String queryStaticWindowParameterSubstitutionGoalFile; + private final String queryStaticWindowParameterSubstitutionRulesFile; + private final String queryDerivationTriggerContextChangeFile; + private final String queryDerivationTriggerMonitorFile; + + // preprocessing input files & options + private final List preprocessingOntologyCreationOptions; + private final List preprocessingTripleCreationInputFiles; + private final List preprocessingTripleCreationOptions; + private final List preprocessingRuleCreationInputFiles; + private final List preprocessingRuleCreationOptions; + private final List preprocessingImageCreationInputFiles; + + // preprocessing output files + private final String ontologyFile; + private final String triplesFile; + private final String rulesFile; + private final String imageFile; + private final String imageFileLoading; + + // query derivation bash inputs + private final List queryExtractionOptions; + private final List windowParameterExtractionOptions; + private final List querySubstitutionOptions; + + /** + * Map keeping track of link between query name and specific + * DIVIDE EYE query + */ + private final Map divideQueryMap; + + /** + * Map keeping track of link between a prefix URI and its RSP-QL prefix + * string converted by an instance of {@link EyeDivideQueryDeriver} + */ + private final Map convertedPrefixesMap; + + private final boolean handleTBoxDefinitionsInContext; + + + // INPUTS FOR 
SPARQL QUERIES + private static final String SPARQL_QUERY_PREPARE_CONTEXT_INITIAL_RESOURCE = + Paths.get("sparql", "prepare-context-for-query-derivation-initial.query").toString(); + private static final String SPARQL_QUERY_PREPARE_CONTEXT_LOOP_RESOURCE = + Paths.get("sparql", "prepare-context-for-query-derivation-loop.query").toString(); + private final UpdateRequest sparqlQueryPrepareContextInitial; + private final UpdateRequest sparqlQueryPrepareContextLoop; + + + // INPUT CONTAINING TRIPLE SPECIFYING SUBSTITUTION TRIGGER + private enum SubstitutionTrigger { + CONTEXT_CHANGE, MONITOR + } + private final Map substitutionTriggerFilePathMap; + + + EyeDivideQueryDeriver(boolean handleTBoxDefinitionsInContext) throws DivideQueryDeriverException { + try { + // set ontology loaded flag to false + this.ontologyLoaded = false; + + // initialize query map + this.divideQueryMap = new HashMap<>(); + + // initialize map to keep track of prefixes converted by EyeDivideQueryConverter + this.convertedPrefixesMap = new HashMap<>(); + + // save boolean about handling TBox definitions in context + this.handleTBoxDefinitionsInContext = handleTBoxDefinitionsInContext; + + // create DIVIDE directory + File divideDirectory = new File(DIVIDE_DIRECTORY); + boolean created = divideDirectory.mkdir(); + if (!created) { + // if directory already exists, remove all files + FileUtils.cleanDirectory(divideDirectory); + } + + // create copies of resource files in DIVIDE directory + preprocessingInstantiateTriplesFile = + copyResourceToDivideDirectory(PREPROCESSING_INSTANTIATE_TRIPLES_RESOURCE); + preprocessingInstantiateRulesFile = + copyResourceToDivideDirectory(PREPROCESSING_INSTANTIATE_RULES_RESOURCE); + preprocessingListsFile = + copyResourceToDivideDirectory(PREPROCESSING_LISTS_RESOURCE); + queryExtractionGoalFile = + copyResourceToDivideDirectory(QUERY_EXTRACTION_GOAL_RESOURCE); + windowParameterExtractionGoalFile = + 
copyResourceToDivideDirectory(WINDOW_PARAMETER_EXTRACTION_GOAL_RESOURCE); + queryInputVariableSubstitutionGoalFile = + copyResourceToDivideDirectory(QUERY_INPUT_VARIABLE_SUBSTITUTION_GOAL_RESOURCE); + queryInputVariableSubstitutionRulesFile = + copyResourceToDivideDirectory(QUERY_INPUT_VARIABLE_SUBSTITUTION_RULES_RESOURCE); + queryInputVariableSubstitutionSupportedDatatypesFile = + copyResourceToDivideDirectory(QUERY_INPUT_VARIABLE_SUBSTITUTION_SUPPORTED_DATATYPES_RESOURCE); + queryDynamicWindowParameterSubstitutionGoalFile = + copyResourceToDivideDirectory(QUERY_DYNAMIC_WINDOW_PARAMETER_SUBSTITUTION_GOAL_RESOURCE); + queryDynamicWindowParameterSubstitutionRulesFile = + copyResourceToDivideDirectory(QUERY_DYNAMIC_WINDOW_PARAMETER_SUBSTITUTION_RULES_RESOURCE); + queryStaticWindowParameterSubstitutionGoalFile = + copyResourceToDivideDirectory(QUERY_STATIC_WINDOW_PARAMETER_SUBSTITUTION_GOAL_RESOURCE); + queryStaticWindowParameterSubstitutionRulesFile = + copyResourceToDivideDirectory(QUERY_STATIC_WINDOW_PARAMETER_SUBSTITUTION_RULES_RESOURCE); + queryDerivationTriggerContextChangeFile = + copyResourceToDivideDirectory(QUERY_DERIVATION_TRIGGER_CONTEXT_CHANGE_RESOURCE); + queryDerivationTriggerMonitorFile = + copyResourceToDivideDirectory(QUERY_DERIVATION_TRIGGER_MONITOR_RESOURCE); + + // set paths of ontology file, rules file & EYE image file + // (which do not exist yet, but will be the output of the preprocessing) + ontologyFile = Paths.get(DIVIDE_DIRECTORY, "eye", "n3", "ontology.n3"). + toFile().getCanonicalPath(); + triplesFile = Paths.get(DIVIDE_DIRECTORY, "eye", "n3", "triples.n3"). + toFile().getCanonicalPath(); + rulesFile = Paths.get(DIVIDE_DIRECTORY, "eye", "n3", "rules.n3"). + toFile().getCanonicalPath(); + imageFile = Paths.get(DIVIDE_DIRECTORY, "eye", "ype.pvm"). + toFile().getCanonicalPath(); + imageFileLoading = Paths.get(DIVIDE_DIRECTORY, "eye", "ype-loading.pvm"). 
+ toFile().getCanonicalPath(); + + // set static inputs & options for the different steps of the ontology + // preprocessing (to be readily available when preprocessing should + // start, i.e., when the loadOntology method is called) + preprocessingOntologyCreationOptions = + Arrays.asList("--no-qvars", "--nope"); + preprocessingTripleCreationInputFiles = + Arrays.asList(ontologyFile, preprocessingListsFile, preprocessingInstantiateTriplesFile); + preprocessingTripleCreationOptions = + Arrays.asList("--nope", "--no-skolem", + "http://eulersharp.sourceforge.net/.well-known/genid/myVariables"); + preprocessingRuleCreationInputFiles = + Collections.singletonList(triplesFile); + preprocessingRuleCreationOptions = + Arrays.asList("--nope", "--no-skolem", + "http://eulersharp.sourceforge.net/.well-known/genid/myVariables"); + preprocessingImageCreationInputFiles = + Arrays.asList(triplesFile, rulesFile); + + // set static inputs & options for query derivation + // (to be readily available each time the query derivation is triggered, + // i.e., the deriveQueries method is called) + queryExtractionOptions = Arrays.asList("--nope", "--tactic", "existing-path"); + windowParameterExtractionOptions = Collections.singletonList("--nope"); + querySubstitutionOptions = Collections.singletonList("--nope"); + + // read queries to prepare context for query derivation, + // and parse them as a JENA update query + sparqlQueryPrepareContextInitial = UpdateFactory.create( + readResource(SPARQL_QUERY_PREPARE_CONTEXT_INITIAL_RESOURCE)); + sparqlQueryPrepareContextLoop = UpdateFactory.create( + readResource(SPARQL_QUERY_PREPARE_CONTEXT_LOOP_RESOURCE)); + + // load substitution trigger map + substitutionTriggerFilePathMap = new HashMap<>(); + substitutionTriggerFilePathMap.put(SubstitutionTrigger.CONTEXT_CHANGE, + queryDerivationTriggerContextChangeFile); + substitutionTriggerFilePathMap.put(SubstitutionTrigger.MONITOR, + queryDerivationTriggerMonitorFile); + + } catch (IOException e) { + 
throw new DivideQueryDeriverException(e); + } + } + + @Override + public void loadOntology(Model ontology) + throws DivideInvalidInputException, DivideInitializationException { + LOGGER.info("LOADING ONTOLOGY: running the ontology preprocessing script " + + "with the given ontology files as input"); + + try { + // write ontology to temp file + String ontologyTurtleFile = writeToTempTurtleFile(ontology); + + // load ontology with EYE and write to N3 ontology file + EyeReasoner.runToFile( + Collections.singletonList(ontologyTurtleFile), + ontologyFile, + preprocessingOntologyCreationOptions); + + // generate all triples by applying OWL-RL rules on N3 ontology and + // write to N3 triples file + EyeReasoner.runToFile( + preprocessingTripleCreationInputFiles, + triplesFile, + preprocessingTripleCreationOptions); + + // generate instantiated OWL-RL rules from collection of inferred + // triples and write to N3 rules file + EyeReasoner.runToFile( + preprocessingRuleCreationInputFiles, + preprocessingInstantiateRulesFile, + rulesFile, + preprocessingRuleCreationOptions); + + // create image of EYE reasoner that has the N3 ontology and rules files + // preloaded into it + // = intermediate code file resulting from Prolog compilation + EyeReasoner.runToImage(preprocessingImageCreationInputFiles, imageFileLoading); + + // if everything is loaded successfully, the existing used image file is overwritten + Files.copy(Paths.get(imageFileLoading), Paths.get(imageFile), + StandardCopyOption.REPLACE_EXISTING); + + // mark the successful loading of the ontology + this.ontologyLoaded = true; + + } catch (BashException e) { + String message = "The ontology contains invalid RDF (should be valid N3)"; + LOGGER.error(message, e); + throw new DivideInvalidInputException(message, e); + + } catch (IOException e) { + String message = "Unknown error during ontology loading"; + LOGGER.error(message, e); + throw new DivideInitializationException(message, e); + } + } + + @Override + public void 
registerQuery(IDivideQuery divideQuery, + IDivideQueryParser divideQueryParser) + throws DivideQueryDeriverException, DivideInvalidInputException { + // do nothing if given DIVIDE query is null + if (divideQuery == null) { + return; + } + + // only proceed if query with this name does not exist yet + // (will not happen since this is already checked before) + if (!divideQueryMap.containsKey(divideQuery.getName())) { + // validate different fields of query with EYE to ensure they contain valid N3 + validateEyeInput(false, divideQuery.getQueryPattern(), + divideQuery.getSensorQueryRule(), divideQuery.getGoal()); + + // copy EYE DIVIDE query parts to files according to standard file + // templates, substituted with the query name as parent directory + String queryPatternPath = writeToDivideDirectory( + divideQuery.getQueryPattern(), String.format( + EYE_DIVIDE_QUERY_QUERY_PATTERN_PATH_TEMPLATE, + divideQuery.getName())); + String sensorQueryRulePath = writeToDivideDirectory( + divideQuery.getSensorQueryRule(), String.format( + EYE_DIVIDE_QUERY_SENSOR_QUERY_RULE_PATH_TEMPLATE, + divideQuery.getName())); + String goalPath = writeToDivideDirectory( + divideQuery.getGoal(), String.format( + EYE_DIVIDE_QUERY_GOAL_PATH_TEMPLATE, + divideQuery.getName())); + + // create corresponding EYE DIVIDE query keeping track of the + // canonical paths of the different files + EyeDivideQuery eyeDivideQuery = new EyeDivideQuery( + queryPatternPath, sensorQueryRulePath, goalPath); + + // save EYE DIVIDE query + divideQueryMap.put(divideQuery.getName(), eyeDivideQuery); + + // process context enrichment: + // if all context-enriching queries can be written as rules, + // these rules will be appended to the sensor query rule file, + // and the context enrichment can be removed from the DIVIDE query + // -> so originally, the sensor query rule file content only consists + // of the sensor query rule itself + if (divideQuery.getContextEnrichment() != null + && 
divideQuery.getContextEnrichment().getQueries() != null + && !divideQuery.getContextEnrichment().getQueries().isEmpty()) { + // if the context enrichment contains at least one query, + // check the possibility for each query to write it as a rule + try { + LOGGER.info("REGISTER QUERY: trying to convert existing non-empty query context " + + "enrichment of query {} to a set of EYE rules", divideQuery.getName()); + + // first convert all queries to a string of rules + List queryAsRuleList = new ArrayList<>(); + for (ContextEnrichingQuery query : divideQuery.getContextEnrichment().getQueries()) { + String queryAsRule = convertQueryToEyeRule(query.getQuery(), divideQueryParser); + + // validate rule as EYE input + validateEyeInput(false, queryAsRule); + + // if valid, add to list of query rules + queryAsRuleList.add(queryAsRule); + } + + // register context-enriching query rules to EYE DIVIDE query + // (by writing them to a temp file) + for (int i = 0; i < queryAsRuleList.size(); i++) { + String queryAsRule = queryAsRuleList.get(i); + + // write context-enriching query rule to file + String contextEnrichingQueryRulePath = writeToDivideDirectory( + queryAsRule, String.format( + EYE_DIVIDE_QUERY_CONTEXT_ENRICHING_QUERY_PATH_TEMPLATE, + divideQuery.getName(), i + 1)); + + // register path to file to EYE DIVIDE query + eyeDivideQuery.addContextEnrichingQueryFilePath(contextEnrichingQueryRulePath); + } + + // reset context enrichment of DIVIDE query to avoid that the DIVIDE engine + // executes these queries before starting the EYE query derivation + divideQuery.removeContextEnrichment(); + + LOGGER.info("REGISTER QUERY: existing non-empty query context enrichment of query {} " + + "is successfully converted to a set of EYE rules", divideQuery.getName()); + + } catch (DivideInvalidInputException | DivideQueryDeriverException e) { + LOGGER.warn("REGISTER QUERY: existing non-empty query context enrichment of query {} " + + "cannot be converted to a set of EYE rules", 
divideQuery.getName());
                    // conversion not succeeded -> everything can be left as is:
                    // - the context enrichment of the query can stay there
                    // - the sensor query rule file content should not be extended with the rule string
                }
            }
        }
    }

    /**
     * Unregisters the given DIVIDE query from this query deriver by removing
     * its EYE representation from the internal query map. Safe to call with
     * {@code null} and with queries that were never registered (no-ops).
     * Note: the query's files written to the DIVIDE directory at registration
     * time are not deleted here (only the in-memory mapping is removed).
     *
     * @param divideQuery query to unregister; ignored if {@code null}
     */
    @Override
    public void unregisterQuery(IDivideQuery divideQuery) {
        // do nothing if given DIVIDE query is null
        if (divideQuery == null) {
            return;
        }

        // remove corresponding EYE DIVIDE query from map
        divideQueryMap.remove(divideQuery.getName());
    }

    @Override
    public IDivideQueryDeriverResult deriveQueries(String divideQueryName,
                                                   Context context,
                                                   String componentId)
            throws DivideNotInitializedException, DivideQueryDeriverException {
        // query derivation requires the preloaded ontology image built by loadOntology
        if (!ontologyLoaded) {
            throw new DivideNotInitializedException(
                    "Ontology has not been loaded yet for the query deriver of this DIVIDE engine");
        }

        EyeDivideQuery eyeDivideQuery = divideQueryMap.get(divideQueryName);

        if (eyeDivideQuery != null) {
            try {
                LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_START\t{}\t{}\t{}",
                        divideQueryName, componentId, context);
                LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_START_OVERHEAD\t{}\t{}\t{}",
                        divideQueryName, componentId, context);

                // prepare context for query derivation
                Model preparedContext = prepareContextForQueryDerivation(context, divideQueryName);

                // put new context into temporary Turtle file (= N3 syntax) that
                // can be read by the query derivation
                String contextFile = writeToTempTurtleFile(preparedContext);

                // retrieve canonical paths of input files of EYE DIVIDE query
                String sensorQueryFile = eyeDivideQuery.getSensorQueryFilePath();
                String queryPatternFile = eyeDivideQuery.getQueryPatternFilePath();
                String queryGoalFile = eyeDivideQuery.getGoalFilePath();

                // create output directory for this query derivation
                // NOTE(review): 'formatter' is a shared field used to timestamp the
                // directory name; if it is a SimpleDateFormat it is not thread-safe —
                // confirm how concurrent derivations are serialized
                String queryDerivationDirectoryPath = Paths.get(DIVIDE_DIRECTORY,
                        "query-derivation", componentId, divideQueryName,
                        formatter.format(new
Date())).toString(); + + // construct output files & create parent directories + File proofFile = Paths.get( + queryDerivationDirectoryPath, "proof.n3").toFile(); + String proofFilePath = proofFile.getCanonicalPath(); + File extractedQueriesFile = Paths.get( + queryDerivationDirectoryPath, "extracted-queries.n3").toFile(); + String extractedQueriesFilePath = extractedQueriesFile.getCanonicalPath(); + File extractedWindowParametersFile = Paths.get( + queryDerivationDirectoryPath, "extracted-window-parameters.n3").toFile(); + String extractedWindowParametersFilePath = extractedWindowParametersFile.getCanonicalPath(); + File queriesAfterInputVariableSubstitutionFile = Paths.get( + queryDerivationDirectoryPath, + "queries-after-input-variable-substitution.n3").toFile(); + String queriesAfterInputVariableSubstitutionFilePath = + queriesAfterInputVariableSubstitutionFile.getCanonicalPath(); + File queriesAfterDynamicWindowParameterSubstitutionFile = Paths.get( + queryDerivationDirectoryPath, + "queries-after-dynamic-window-parameter-substitution.n3").toFile(); + String queriesAfterDynamicWindowParameterSubstitutionFilePath = + queriesAfterDynamicWindowParameterSubstitutionFile.getCanonicalPath(); + proofFile.getParentFile().mkdirs(); + + // verify if new TBox definitions in context should be handled + String usedImageFile; + List proofInputFiles = new ArrayList<>(); + if (handleTBoxDefinitionsInContext) { + // if handling new TBox definitions, a new image will be built + // from the prebuilt image, using the context data + // -> only the sensor query file is given as input to the proof + // generation (since the context file is already contained in + // the new image) + usedImageFile = generateNewImageFromContextWithPossibleTBoxDefinitions(contextFile); + proofInputFiles.add(sensorQueryFile); + } else { + // if not handling new TBox definitions (= default), simply use + // the prebuilt image and use the sensor query file & context file + // as input for the proof 
generation + usedImageFile = imageFile; + proofInputFiles.add(sensorQueryFile); + proofInputFiles.add(contextFile); + } + // -> potential rule files representing context-enriching queries + // are also added to input files for proof generation + proofInputFiles.addAll(eyeDivideQuery.getContextEnrichingQueryFilePaths()); + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_END_OVERHEAD\t{}\t{}\t{}", + divideQueryName, componentId, context); + + // construct proof towards goal + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_START_REASONING\t{}\t{}\t{}", + divideQueryName, componentId, context); + EyeReasoner.runFromImageToFile( + usedImageFile, proofInputFiles, queryGoalFile, proofFilePath, null); + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_END_REASONING\t{}\t{}\t{}", + divideQueryName, componentId, context); + + // extract queries from proof + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_START_EXTRACTION\t{}\t{}\t{}", + divideQueryName, componentId, context); + List queryExtractionInputFiles = Arrays.asList( + proofFilePath, contextFile); + EyeReasoner.runToFile( + queryExtractionInputFiles, queryExtractionGoalFile, + extractedQueriesFilePath, queryExtractionOptions); + + // extract window parameters from proof + List windowParameterExtractionInputFiles = Arrays.asList( + proofFilePath, contextFile); + EyeReasoner.runToFile( + windowParameterExtractionInputFiles, windowParameterExtractionGoalFile, + extractedWindowParametersFilePath, windowParameterExtractionOptions); + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_END_EXTRACTION\t{}\t{}\t{}", + divideQueryName, componentId, context); + + // substitute input variables of extracted queries in query patterns + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_START_INPUT_SUBSTITUTION\t{}\t{}\t{}", + divideQueryName, componentId, context); + List inputVariableSubstitutionInputFiles = Arrays.asList( + queryPatternFile, + extractedQueriesFilePath, + 
extractedWindowParametersFilePath, + queryInputVariableSubstitutionRulesFile, + queryInputVariableSubstitutionSupportedDatatypesFile); + EyeReasoner.runToFile( + inputVariableSubstitutionInputFiles, + queryInputVariableSubstitutionGoalFile, + queriesAfterInputVariableSubstitutionFilePath, + querySubstitutionOptions); + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_END_INPUT_SUBSTITUTION\t{}\t{}\t{}", + divideQueryName, componentId, context); + + // create an intermediate query derivation result + // -> send this intermediate query derivation result to the window parameter substitution + // TODO: 07/07/2021 maybe save query derivation result for + // (DIVIDE query, component ID) combo? + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_START_WINDOW_SUBSTITUTION\t{}\t{}\t{}", + divideQueryName, componentId, context); + EyeDivideQueryDeriverIntermediateResult eyeDivideQueryDeriverIntermediateResult = + new EyeDivideQueryDeriverIntermediateResult( + queriesAfterInputVariableSubstitutionFilePath, + queriesAfterDynamicWindowParameterSubstitutionFilePath); + EyeDivideQueryDeriverResult result = substituteWindowParametersInQuery( + eyeDivideQueryDeriverIntermediateResult, + SubstitutionTrigger.CONTEXT_CHANGE); + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_END_WINDOW_SUBSTITUTION\t{}\t{}\t{}", + divideQueryName, componentId, context); + LOGGER.debug(LogConstants.METRIC_MARKER, "DERIVE_QUERIES_END\t{}\t{}\t{}", + divideQueryName, componentId, context); + return result; + + } catch (BashException | IOException e) { + throw new DivideQueryDeriverException(e); + } + + } else { + LOGGER.warn("Calling the DIVIDE query derivation for an unknown (unregistered) " + + "DIVIDE query with name '{}'", divideQueryName); + // return empty list of derived queries, since the derivation was called for + // an unknown DIVIDE query + return new EyeDivideQueryDeriverResult(); + } + } + + @Override + public IDivideQueryDeriverResult 
substituteWindowParameters(String divideQueryName, + Model windowParameters, + String componentId, + IDivideQueryDeriverResult lastResult) + throws DivideQueryDeriverException, DivideNotInitializedException { + if (!ontologyLoaded) { + throw new DivideNotInitializedException( + "Ontology has not been loaded yet for the query deriver of this DIVIDE engine"); + } + + if (lastResult == null) { + throw new DivideQueryDeriverException( + "No valid result was passed to do the window parameter substitution"); + } + + try { + // write new window parameters to temp file + String windowParametersFile = writeToTempTurtleFile(windowParameters); + + // cast the last result to a result of this EYE query deriver + EyeDivideQueryDeriverResult eyeDivideQueryDeriverResult = + (EyeDivideQueryDeriverResult) lastResult; + + // ensure the last result is valid + if (eyeDivideQueryDeriverResult.getIntermediateResult() == null) { + throw new DivideQueryDeriverException( + "No valid result was passed to do the window parameter substitution"); + } + + // do the window parameter substitution again, + // starting from the intermediate query deriver result + // TODO: 07/07/2021 maybe save query derivation result for + // (DIVIDE query, component ID) combo? + return substituteWindowParametersInQuery( + eyeDivideQueryDeriverResult.getIntermediateResult(), + SubstitutionTrigger.MONITOR, + windowParametersFile); + + } catch (BashException | IOException e) { + throw new DivideQueryDeriverException(e); + } + } + + public EyeDivideQueryDeriverResult substituteWindowParametersInQuery( + EyeDivideQueryDeriverIntermediateResult eyeDivideQueryDeriverIntermediateResult, + SubstitutionTrigger trigger, + String... 
extraInputFiles) throws IOException, BashException { + // substitute dynamic window parameters of extracted queries in query patterns + List dynamicWindowParameterSubstitutionInputFiles = new ArrayList<>(); + dynamicWindowParameterSubstitutionInputFiles.addAll(Arrays.asList( + eyeDivideQueryDeriverIntermediateResult. + getQueriesAfterInputVariableSubstitutionFilePath(), + queryDynamicWindowParameterSubstitutionRulesFile, + substitutionTriggerFilePathMap.get(trigger))); + dynamicWindowParameterSubstitutionInputFiles.addAll(Arrays.asList(extraInputFiles)); + EyeReasoner.runToFile( + dynamicWindowParameterSubstitutionInputFiles, + queryDynamicWindowParameterSubstitutionGoalFile, + eyeDivideQueryDeriverIntermediateResult. + getQueriesAfterDynamicWindowParameterSubstitutionFilePath(), + querySubstitutionOptions); + + // substitute static window parameters of extracted queries in query patterns + List staticWindowParameterSubstitutionInputFiles = Arrays.asList( + eyeDivideQueryDeriverIntermediateResult. 
+ getQueriesAfterDynamicWindowParameterSubstitutionFilePath(), + queryStaticWindowParameterSubstitutionRulesFile); + String queriesAfterStaticWindowParameterSubstitution = EyeReasoner.run( + staticWindowParameterSubstitutionInputFiles, + queryStaticWindowParameterSubstitutionGoalFile, + querySubstitutionOptions); + + // convert substituted queries (in N3 = Turtle format) to Jena model + Model substitutedQueriesModel = JenaUtilities.parseString( + queriesAfterStaticWindowParameterSubstitution, RDFLanguage.TURTLE); + + // convert queries to individual RSP-QL query strings + EyeDivideQueryConverter queryConverter = new EyeDivideQueryConverter(this); + List convertedQueries = queryConverter.getQueries(substitutedQueriesModel); + + // create and return a query derivation result + return new EyeDivideQueryDeriverResult( + eyeDivideQueryDeriverIntermediateResult, + queriesAfterStaticWindowParameterSubstitution, + substitutedQueriesModel, + convertedQueries); + } + + private Model prepareContextForQueryDerivation(Context context, + String divideQueryName) { + long start = System.currentTimeMillis(); + + Model model = context.getContext(); + UpdateAction.execute(sparqlQueryPrepareContextInitial, model); + Model copy = ModelFactory.createDefaultModel(); + while (model.size() != copy.size()) { + copy = ModelFactory.createDefaultModel(); + copy.add(model); + UpdateAction.execute(sparqlQueryPrepareContextLoop, model); + } + + long end = System.currentTimeMillis(); + LOGGER.info("Prepared context for DIVIDE query {} and context {} in {} seconds", + divideQueryName, context.getId(), (end - start)); + + return model; + } + + private String generateNewImageFromContextWithPossibleTBoxDefinitions(String contextFile) + throws IOException, BashException { + // generate temporary files for outputs of reasoner + String triplesFile = File.createTempFile("triples", ".ttl").getCanonicalPath(); + String rulesFile = File.createTempFile("rules", ".ttl").getCanonicalPath(); + String 
newImageFile = File.createTempFile("ype", ".pvm").getCanonicalPath();
        // NOTE(review): the temp-file prefix "ype" looks garbled — presumably "eye";
        // harmless either way (it only names a temp file), but confirm against history

        // generate new triples from applying all OWL-RL rules to image (with original
        // TBox) and new context
        EyeReasoner.runFromImageToFile(
                imageFile,
                Arrays.asList(contextFile, preprocessingListsFile,
                        preprocessingInstantiateTriplesFile),
                triplesFile,
                preprocessingTripleCreationOptions);

        // generate new rules from the set of new triples
        EyeReasoner.runToFile(
                Collections.singletonList(triplesFile),
                preprocessingInstantiateRulesFile,
                rulesFile,
                preprocessingRuleCreationOptions);

        // create new image based on new triples and rules
        EyeReasoner.runToImage(
                Arrays.asList(triplesFile, rulesFile),
                newImageFile);

        return newImageFile;
    }

    /**
     * Caches the converted prefix string for the given URI.
     * Synchronized together with {@link #retrieveConvertedPrefixesString(String)}
     * because the backing map is shared mutable state of this deriver.
     */
    synchronized void saveConvertedPrefixesString(String uri, String converted) {
        this.convertedPrefixesMap.put(uri, converted);
    }

    /**
     * @return converted prefix string for the given URI if this has already
     * been registered with the {@link #saveConvertedPrefixesString(String, String)}
     * method; null otherwise
     */
    synchronized String retrieveConvertedPrefixesString(String uri) {
        return this.convertedPrefixesMap.get(uri);
    }

    /**
     * Checks whether the EYE input is invalid or not.
+ * To be valid, an input should be valid N3 and should not be empty (unless it is + * specified that an empty can be empty) + * + * @param allowEmpty specifies whether the inputs can be empty or not + * (if not, a DivideInvalidInputException will be thrown if any + * of the inputs is empty) + * @param inputs string inputs to be validated (NOT files, but the actual inputs themselves) + * @throws DivideInvalidInputException if any of the inputs contains invalid N3, or if any + * of the inputs is empty while the allowEmpty parameter + * is set to false + * @throws DivideQueryDeriverException if something went wrong during the validation, making it + * impossible to validate this + */ + @SuppressWarnings("SameParameterValue") + private void validateEyeInput(boolean allowEmpty, String... inputs) + throws DivideInvalidInputException, DivideQueryDeriverException { + try { + // write all inputs to temporary files + List inputFiles = new ArrayList<>(); + for (String input : inputs) { + // check if empty when not allowed + if (!allowEmpty && input.trim().isEmpty()) { + throw new DivideInvalidInputException("Some of the inputs are empty"); + } + + String tempFile = IOUtilities.writeToTempFile( + input, + "input-" + UUID.randomUUID() + "-" + System.currentTimeMillis(), ".ttl"); + if (tempFile == null) { + throw new IOException("Error when writing input to temp file for validation"); + } + inputFiles.add(tempFile); + } + + // simply read in input files with EYE reasoner, and output similarly + // to how the ontology is outputted + // => EYE input reading step will already fail if the input is invalid + // and therefore generate a BashException + EyeReasoner.run( + inputFiles, + preprocessingOntologyCreationOptions); + + } catch (BashException e) { + String message = "Some of the inputs contain invalid RDF (should be valid N3)"; + LOGGER.error(message, e); + throw new DivideInvalidInputException(message, e); + + } catch (IOException e) { + String message = "Unknown error during 
validation of query input"; + LOGGER.error(message, e); + throw new DivideQueryDeriverException(message, e); + } + } + + private String copyResourceToDivideDirectory(String resource) throws DivideQueryDeriverException { + // read resource into content string + String content = readResource(resource); + + // write content string to DIVIDE directory on same path as resource + // and return canonical path of this new file + return writeToDivideDirectory(content, resource); + } + + private String readResource(String resource) { + return IOUtilities.readFileIntoString( + getClass().getResourceAsStream(String.format("/%s", resource))); + } + + /** + * @return canonical path of file written to DIVIDE directory + */ + private String writeToDivideDirectory(String content, String relativePath) + throws DivideQueryDeriverException { + try { + // create file objects and create required parent directories + // (if not existing yet) + File file = new File(DIVIDE_DIRECTORY, relativePath); + file.getParentFile().mkdirs(); + + // get canonical path of new file + String canonicalPath = file.getCanonicalPath(); + + // write content to new file + IOUtilities.writeToFile(content, canonicalPath); + + return canonicalPath; + + } catch (IOException e) { + throw new DivideQueryDeriverException(e); + } + } + + /** + * @return canonical path of created temporary Turtle file + */ + private String writeToTempTurtleFile(Model triples) throws IOException { + String tempFile = IOUtilities.writeToTempFile( + JenaUtilities.serializeModel(triples, RDFLanguage.TURTLE), + "triples-" + UUID.randomUUID() + "-" + System.currentTimeMillis(), ".ttl"); + + if (tempFile == null) { + throw new IOException("Error when writing data to temp file"); + } + + return tempFile; + } + + + + // EYE QUERY TO RULE CONVERSION + + private static final String SENSOR_QUERY_RULE_ADDITIONAL_RULE_TEMPLATE = + "%s\n\n{\n%s\n}\n=>\n{\n%s\n} ."; + + private String convertQueryToEyeRule(String query, IDivideQueryParser 
divideQueryParser)
            throws DivideQueryDeriverException {
        try {
            // parse query
            ParsedSparqlQuery parsedSparqlQuery = divideQueryParser.parseSparqlQuery(query);
            SplitSparqlQuery splitSparqlQuery = parsedSparqlQuery.getSplitSparqlQuery();

            // check if query defines a dynamic window parameter
            // -> if so, it cannot be rewritten as a rule since then the idea of "adding an empty
            //    list of dynamic window parameters when having none defined in the context after
            //    query derivation" can no longer be applied
            String sdPrefix = "%%%INVALID SPARQL SEQUENCE%%%";
            String sdQueryPrefix = "%%%INVALID SPARQL SEQUENCE%%%";
            for (Prefix prefix : parsedSparqlQuery.getPrefixes()) {
                // NOTE(review): the two URI literals below are empty strings, which makes
                // both branches match the same (empty-URI) prefix — these look like
                // namespace IRIs stripped by text extraction; restore the intended
                // sd / sd-query IRIs from version control before relying on this check
                if ("".equals(prefix.getUri())) {
                    sdPrefix = prefix.getName();
                }
                if ("".equals(prefix.getUri())) {
                    sdQueryPrefix = prefix.getName();
                }
            }
            // NOTE(review): the empty alternates "()" in this regex similarly look like
            // stripped full-IRI forms (matching the un-prefixed property IRIs) — confirm
            if (Pattern.compile(String.format(
                    "((%spattern)|())\\s+" +
                            "((%swindowParameters)|())",
                    sdQueryPrefix, sdPrefix)).
                    matcher(splitSparqlQuery.getResultPart()).find()) {
                String message = "Context-enriching queries contain at " +
                        "least one query defining dynamic window parameters";
                LOGGER.warn(message);
                throw new DivideQueryDeriverException(message);
            }

            // convert query to rule: prefixes + "{ WHERE } => { RESULT } ."
            return String.format(SENSOR_QUERY_RULE_ADDITIONAL_RULE_TEMPLATE,
                    divideQueryParser.getTurtlePrefixList(parsedSparqlQuery.getPrefixes()),
                    splitSparqlQuery.getWherePart(),
                    splitSparqlQuery.getResultPart());

        } catch (InvalidDivideQueryParserInputException e) {
            throw new DivideQueryDeriverException(e);
        }
    }

}
diff --git a/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverFactory.java b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverFactory.java new file mode 100644 index 0000000..27ce54a --- /dev/null +++
b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverFactory.java @@ -0,0 +1,31 @@ +package be.ugent.idlab.divide.queryderivation.eye; + +import be.ugent.idlab.divide.core.engine.IDivideQueryDeriver; +import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; + +public class EyeDivideQueryDeriverFactory { + + /** + * Returns a new instance of {@link IDivideQueryDeriver} which uses + * the EYE reasoner to perform the query derivation. + * + * @param handleTBoxDefinitionsInContext boolean specifying whether the EYE query deriver + * should allow to specify TBox definitions in the + * context updates sent for the query derivation; + * if true, this means that the EYE query deriver should + * scan the context for new OWL-RL axioms and rules + * upon each query derivation call, heavily impacting + * the duration of the query derivation task (since EYE + * will create a new image, starting from the preloaded + * ontology image, with all new rules appended that follow + * from the processing of any TBox definitions in the context) + * @return a new instance of {@link IDivideQueryDeriver} based on the EYE reasoner + * @throws DivideQueryDeriverException when something goes wrong during the initialization + * of the new query deriver + */ + public static IDivideQueryDeriver createInstance(boolean handleTBoxDefinitionsInContext) + throws DivideQueryDeriverException { + return new EyeDivideQueryDeriver(handleTBoxDefinitionsInContext); + } + +} diff --git a/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverIntermediateResult.java b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverIntermediateResult.java new file mode 100644 index 0000000..853882d --- /dev/null +++ b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverIntermediateResult.java 
@@ -0,0 +1,35 @@ +package be.ugent.idlab.divide.queryderivation.eye; + +public class EyeDivideQueryDeriverIntermediateResult { + + private final String queriesAfterInputVariableSubstitutionFilePath; + private final String queriesAfterDynamicWindowParameterSubstitutionFilePath; + + public EyeDivideQueryDeriverIntermediateResult( + String queriesAfterInputVariableSubstitutionFilePath, + String queriesAfterDynamicWindowParameterSubstitutionFilePath) { + this.queriesAfterInputVariableSubstitutionFilePath = + queriesAfterInputVariableSubstitutionFilePath; + this.queriesAfterDynamicWindowParameterSubstitutionFilePath = + queriesAfterDynamicWindowParameterSubstitutionFilePath; + } + + public String getQueriesAfterInputVariableSubstitutionFilePath() { + return queriesAfterInputVariableSubstitutionFilePath; + } + + public String getQueriesAfterDynamicWindowParameterSubstitutionFilePath() { + return queriesAfterDynamicWindowParameterSubstitutionFilePath; + } + + @Override + public String toString() { + return "EyeDivideQueryDeriverIntermediateResult{" + + "queriesAfterInputVariableSubstitutionFilePath='" + + queriesAfterInputVariableSubstitutionFilePath + '\'' + + ", queriesAfterDynamicWindowParameterSubstitutionFilePath='" + + queriesAfterDynamicWindowParameterSubstitutionFilePath + '\'' + + '}'; + } + +} diff --git a/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverResult.java b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverResult.java new file mode 100644 index 0000000..d7a6a94 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/java/be/ugent/idlab/divide/queryderivation/eye/EyeDivideQueryDeriverResult.java @@ -0,0 +1,66 @@ +package be.ugent.idlab.divide.queryderivation.eye; + +import be.ugent.idlab.divide.core.engine.IDivideQueryDeriverResult; +import be.ugent.idlab.util.rdf.RDFLanguage; +import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaUtilities; 
+import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; + +import java.util.ArrayList; +import java.util.List; + +public class EyeDivideQueryDeriverResult implements IDivideQueryDeriverResult { + + private final EyeDivideQueryDeriverIntermediateResult intermediateResult; + private final String queriesAfterStaticWindowParameterSubstitution; + private final Model substitutedQueriesModel; + private final List substitutedRspQlQueries; + + public EyeDivideQueryDeriverResult() { + this.intermediateResult = null; + this.queriesAfterStaticWindowParameterSubstitution = null; + this.substitutedQueriesModel = ModelFactory.createDefaultModel(); + this.substitutedRspQlQueries = new ArrayList<>(); + } + + public EyeDivideQueryDeriverResult(EyeDivideQueryDeriverIntermediateResult intermediateResult, + String queriesAfterStaticWindowParameterSubstitution, + Model substitutedQueriesModel, + List substitutedRspQlQueries) { + this.intermediateResult = intermediateResult; + this.queriesAfterStaticWindowParameterSubstitution = + queriesAfterStaticWindowParameterSubstitution; + this.substitutedQueriesModel = substitutedQueriesModel; + this.substitutedRspQlQueries = substitutedRspQlQueries; + } + + public EyeDivideQueryDeriverIntermediateResult getIntermediateResult() { + return intermediateResult; + } + + public String getQueriesAfterStaticWindowParameterSubstitution() { + return queriesAfterStaticWindowParameterSubstitution; + } + + public Model getSubstitutedQueriesModel() { + return substitutedQueriesModel; + } + + @Override + public List getSubstitutedRspQlQueries() { + return substitutedRspQlQueries; + } + + @Override + public String toString() { + return "EyeDivideQueryDeriverResult{" + + "intermediateResult=" + intermediateResult + + ", queriesAfterStaticWindowParameterSubstitution='" + + queriesAfterStaticWindowParameterSubstitution + '\'' + + ", substitutedQueriesModel=" + JenaUtilities.serializeModel( + substitutedQueriesModel, 
RDFLanguage.TURTLE) + + ", substitutedRspQlQueries=" + substitutedRspQlQueries + + '}'; + } + +} diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-rules.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-rules.n3 new file mode 100644 index 0000000..b085555 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-rules.n3 @@ -0,0 +1,217 @@ +PREFIX rdf: +@prefix rdfs: . +@prefix owl: . +@prefix list: . +@prefix log: . +@prefix e: . +Prefix var: +Prefix : + +#with this file we produce a new version of the ontology including additional rules for rdfs/owl-concepts + + +########################### +#rules for rdfs:subclass +######################### +{ +?C rdfs:subClassOf ?D. +}=> {{?x a ?C} => {?x a ?D}.}. + + + + +############################# +# "range-rules" +############################# +{?P rdfs:range ?C.}=>{{?X ?P ?Y} => {?Y a ?C}.}. + + +############################# +# Adding "inverse of" rules +############################# + +{?P owl:inverseOf ?Q.}=>{ +{ ?S ?P ?O} => {?O ?Q ?S}. +{ ?S ?Q ?O} => {?O ?P ?S}. +}. + + +####################################### +# Intersection rules +####################################### +{?C owl:intersectionOf ?L. ?D list:in ?L} => {{?X a ?C.}=>{?X a ?D}}. + +{?C owl:intersectionOf ?L. ?S e:findall ({var:x a ?Y.} {?Y list:in ?L} ?gl). ?gl log:conjunction ?l. +} => {?l => {var:x a ?C}.}. + + +######################################### +# OWL equivalent class +######################################### + +{?A owl:equivalentClass ?B. } => {{?X a ?A}=>{?X a ?B}. {?X a ?B} => {?X a ?A}}. + + +###################################################### +# OWL some values from +#################################################### + +{?C owl:someValuesFrom ?Y. ?C owl:onProperty ?P. } => {{?U ?P ?V. ?V a ?Y} => {?U a ?C}}. +{?C owl:someValuesFrom owl:Thing. ?C owl:onProperty ?P. } => {{?U ?P ?V}=>{?U a ?C}}. 
+ + +#################################################### +# rdfs subproperty +###################################################### + +{?P rdfs:subPropertyOf ?Q. } => {{?S ?P ?O.}=>{?S ?Q ?O}}. + + +######################################################### +# OWL all different +####################################################### + + + +######################################################## +# OWL all disjoint +######################################################## + + +{?X a owl:AllDisjointClasses. ?X owl:members ?L. ?C list:in ?L. ?D list:in ?L. ?C log:notEqualTo ?D. } =>{{?I a ?C. ?I a ?D}=> false}. + + +############################################################## +# OWL all disjoint properties +############################################################## + + +{?X a owl:AllDisjointProperties. ?X owl:members ?L. ?P list:in ?L. ?Q list:in ?L. ?P log:notEqualTo ?Q. } =>{{?S ?P ?O. ?S ?Q ?O} => false}. + +############################################################## +# OWL allValuesFrom +############################################################# + +{?C owl:allValuesFrom ?Y. ?C owl:onProperty ?P. } => {{?U a ?C. ?U ?P ?V}=>{?V a ?Y}}. + +############################################################# +# OWL assymetric property +############################################################# + +{?P a owl:AsymmetricProperty. } => {{?S ?P ?O. ?O ?P ?S}=>false}. + +####################################################### +# OWL complementof +##################################################### + + +{?C owl:complementOf ?D. } =>{{?X a ?C. ?X a ?D} => false}. + +################################################### +# OWL differentFrom +################################################## + + + +######################################################## +# OWL disjointWith +####################################################### + +{?A owl:disjointWith ?B. } => {{?X a ?A. ?Y a ?B }=>{?X owl:differentFrom ?Y}}. +{?A owl:disjointWith ?B. } => {{?X a ?A. 
?X a ?B}=>false}. + + +########################################################### +# disjointUnionOf +######################################################### + + +################################################## +# distinct members +################################################## + +###################################################### +# equivalent property +###################################################### +{?P owl:equivalentProperty ?Q. } => {{?S ?P ?O}=>{?S ?Q ?O}. {?S ?Q ?O} => {?S ?P ?O}}. + +#################################################### +# functional property +#################################################### + +{?P a owl:FunctionalProperty. } => {{?S ?P ?X. ?S ?P ?Y}=>{?X owl:sameAs ?Y}}. +{?P a owl:FunctionalProperty. } =>{{?S ?P ?X. ?S ?P ?Y. ?X owl:differentFrom ?Y}=> false}. + +############################################################ +# has value +############################################################ +{?C owl:hasValue ?V. ?C owl:onProperty ?P. } => {{?U a ?C} => {?U ?P ?V}.{?U ?P ?V} => {?U a ?C}}. +{?C owl:hasValue ?V. ?C owl:onProperty ?P. } => {{?X ?P ?Y. ?X a ?C. ?V owl:differentFrom ?Y}=> false}. + +############################################################### +# Inverse functional property +############################################################# +{?P a owl:InverseFunctionalProperty. } => {{?X ?P ?O. ?Y ?P ?O}=>{?X owl:sameAs ?Y}}. +{?P a owl:InverseFunctionalProperty. } => {{?X ?P ?O. ?Y ?P ?O. ?X owl:differentFrom ?Y}=>false}. + +################################################################ +# irreflexive property +################################################################ + +{?P a owl:IrreflexiveProperty. } => {{?X ?P ?X}=>false}. + + +################################################################ +# max cardinality +############################################################### +{?C owl:maxCardinality 1. ?C owl:onProperty ?P. } => {{?U a ?C. ?U ?P ?Y. ?U ?P ?Z}=>{?Y owl:sameAs ?Z}}. 
+{?C owl:maxCardinality 0. ?C owl:onProperty ?P. } => {{?X ?P ?Y. ?X a ?C}=> false}.
+{?C owl:maxCardinality 1. ?C owl:onProperty ?P. } => {{?X ?P ?Y. ?X ?P ?Z. ?X a ?C. ?Z owl:differentFrom ?Y}=>false}.
+
+###############################################################
+# max qualified cardinality
+###############################################################
+{?C owl:maxQualifiedCardinality 1. ?C owl:onProperty ?P. ?C owl:onClass ?D. } => {{?U a ?C. ?U ?P ?Y. ?Y a ?D. ?U ?P ?Z. ?Z a ?D}=>{?Y owl:sameAs ?Z}}.
+{?C owl:maxQualifiedCardinality 1. ?C owl:onProperty ?P. ?C owl:onClass owl:Thing. } => {{?U a ?C. ?U ?P ?Y. ?U ?P ?Z} => {?Y owl:sameAs ?Z}}.
+# note: the qualified filler class must be a distinct variable (?D); reusing ?C here
+# would only match restrictions that are their own owl:onClass (cf. OWL 2 RL cls-maxqc2)
+{?C owl:maxQualifiedCardinality 0. ?C owl:onProperty ?P. ?C owl:onClass ?D. } => {{?U a ?C. ?U ?P ?Y. ?Y a ?D}=>false}.
+{?C owl:maxQualifiedCardinality 0. ?C owl:onProperty ?P. ?C owl:onClass owl:Thing.} =>{{ ?U a ?C. ?U ?P ?Y}=> false}.
+
+##################################################################
+# one of
+################################################################
+
+##############################################################
+# property disjoint with
+##############################################################
+
+{?P owl:propertyDisjointWith ?Q. } =>{{?X ?P ?Y. ?X ?Q ?Y}=> false}.
+
+
+#################################################################
+# same as
+###############################################################
+
+
+{?X owl:sameAs ?Y. } => {{?X ?P ?O}=>{?Y ?P ?O}}.
+{?X owl:sameAs ?Y. } => {{?S ?X ?O}=>{?S ?Y ?O}}.
+{?X owl:sameAs ?Y. } => {{?S ?P ?X}=>{?S ?P ?Y}}.
+
+
+
+###############################################################
+# Symmetric and transitive property
+##############################################################
+
+{?P a owl:SymmetricProperty. } => {{?S ?P ?O}=>{?O ?P ?S}}.
+{?P a owl:TransitiveProperty. } => {{?S ?P ?X. ?X ?P ?O.}=>{?S ?P ?O}}. 
+ +#################################################################### +# union of +################################################################### +{?C owl:unionOf ?L. ?D list:in ?L. } => {{?X a ?D}=>{?X a ?C}}. + + +#################################################################### +# property chain +################################################################### +{?P owl:propertyChainAxiom ( ?P1 ?P2 ) .} => {{?X ?P1 ?Y. ?Y ?P2 ?Z.}=>{?X ?P ?Z}}. diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-rules_old.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-rules_old.n3 new file mode 100644 index 0000000..a331148 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-rules_old.n3 @@ -0,0 +1,269 @@ +PREFIX rdf: +@prefix rdfs: . +@prefix owl: . +@prefix list: . +@prefix log: . +@prefix e: . +Prefix var: +Prefix : + +#with this file we produce a new version of the ontology including additional rules for rdfs/owl-concepts + + +#keep the whole Ontology +#{?s ?p ?o} => {?s ?p ?o}. + + +########################### +#rules for rdfs:subclass +######################### +{ +?C rdfs:subClassOf ?D. +}=> {{?x a ?C} => {?x a ?D}.}. + + +# for pure reasoning this rule is not really necessary. It is just useful for the case that someone for example wants to specifically know all subclasses of a certain class. +{?C rdfs:subClassOf ?D. ?D rdfs:subClassOf ?E} => {?C rdfs:subClassOf ?E}. +{?C rdfs:subClassOf ?D. ?P rdfs:domain ?C} => {?P rdfs:domain ?D}. +{?C rdfs:subClassOf ?D. ?P rdfs:range ?C} => {?P rdfs:range ?D}. + + + + +############################# +# "range-rules" +############################# +{?P rdfs:range ?C.}=>{{?X ?P ?Y} => {?Y a ?C}.}. + + +############################# +# Adding "inverse of" rules +############################# + +{?P owl:inverseOf ?Q.}=>{ +{ ?S ?P ?O} => {?O ?Q ?S}. +{ ?S ?Q ?O} => {?O ?P ?S}. 
+?Q owl:inverseOf ?P +}. + + +####################################### +# Intersection rules +####################################### +{?C owl:intersectionOf ?L. ?D list:in ?L} => {{?X a ?C.}=>{?X a ?D}}. +{?C owl:intersectionOf ?L. ?D list:in ?L} => {?C rdfs:subClassOf ?D}. + +{?C owl:intersectionOf ?L. ?S e:findall ({var:x a ?Y.} {?Y list:in ?L} ?gl). ?gl log:conjunction ?l. +} => {?l => {var:x a ?C}.}. + + +######################################### +# OWL equivalent class +######################################### + +{?A owl:equivalentClass ?B. } => {{?X a ?A}=>{?X a ?B}. {?X a ?B} => {?X a ?A}}. +{?A owl:equivalentClass ?B} => {?B owl:equivalentClass ?A}. +{?A owl:equivalentClass ?B. ?B owl:equivalentClass ?C} => {?A owl:equivalentClass ?C}. +{?A owl:equivalentClass ?B} => {?A rdfs:subClassOf ?B. ?B rdfs:subClassOf ?A}. +{?A rdfs:subClassOf ?B. ?B rdfs:subClassOf ?A} => {?A owl:equivalentClass ?B}. + + +###################################################### +# OWL some values from +#################################################### + +{?C owl:someValuesFrom ?Y. ?C owl:onProperty ?P. } => {{?U ?P ?V. ?V a ?Y} => {?U a ?C}}. +{?C owl:someValuesFrom owl:Thing. ?C owl:onProperty ?P. } => {{?U ?P ?V}=>{?U a ?C}}. +{?C owl:someValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:someValuesFrom ?Z. ?D owl:onProperty ?P. ?Y rdfs:subClassOf ?Z} => {?C rdfs:subClassOf ?D}. +{?C owl:someValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:someValuesFrom ?Y. ?D owl:onProperty ?Q. ?P rdfs:subPropertyOf ?Q} => {?C rdfs:subClassOf ?D}. + +# non RL extension + +# {?C owl:onProperty ?P; owl:someValuesFrom ?Y.}=>{ +# {?x a ?C}=>{?x ?P _:x. _:x a ?Y} +# }. + +#################################################### +# rdfs subproperty +###################################################### + +{?P rdfs:subPropertyOf ?Q. } => {{?S ?P ?O.}=>{?S ?Q ?O}}. +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:subPropertyOf ?R} => {?P rdfs:subPropertyOf ?R}. +{?P rdfs:subPropertyOf ?Q. 
?Q rdfs:domain ?C} => {?P rdfs:domain ?C}. +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:range ?C} => {?P rdfs:range ?C}. + + +######################################################### +# OWL all different +####################################################### + +#open for later +{?A a owl:AllDifferent. ?A owl:members ?L. ?X list:in ?L. ?Y list:in ?L. ?X log:notEqualTo ?Y. ?X owl:sameAs ?Y} => false. +{?A a owl:AllDifferent. ?A owl:distinctMembers ?L. ?X list:in ?L. ?Y list:in ?L. ?X log:notEqualTo ?Y. ?X owl:sameAs ?Y} => false. + + +######################################################## +# OWL all disjoint +######################################################## + + +{?X a owl:AllDisjointClasses. ?X owl:members ?L. ?C list:in ?L. ?D list:in ?L. ?C log:notEqualTo ?D. } =>{{?I a ?C. ?I a ?D}=> false}. + + +############################################################## +# OWL all disjoint properties +############################################################## + + +{?X a owl:AllDisjointProperties. ?X owl:members ?L. ?P list:in ?L. ?Q list:in ?L. ?P log:notEqualTo ?Q. } =>{{?S ?P ?O. ?S ?Q ?O} => false}. + +############################################################## +# OWL allValuesFrom +############################################################# + +{?C owl:allValuesFrom ?Y. ?C owl:onProperty ?P. } => {{?U a ?C. ?U ?P ?V}=>{?V a ?Y}}. +{?C owl:allValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:allValuesFrom ?Z. ?D owl:onProperty ?P. ?Y rdfs:subClassOf ?Z} => {?C rdfs:subClassOf ?D}. +{?C owl:allValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:allValuesFrom ?Y. ?D owl:onProperty ?Q. ?P rdfs:subPropertyOf ?Q} => {?D rdfs:subClassOf ?C}. + +############################################################# +# OWL assymetric property +############################################################# + +{?P a owl:AsymmetricProperty. } => {{?S ?P ?O. ?O ?P ?S}=>false}. 
+ +####################################################### +# OWL complementof +##################################################### + + +{?C owl:complementOf ?D} => {?D owl:complementOf ?C}. +{?C owl:complementOf ?D} => {?D owl:disjointWith ?C}. +{?C owl:complementOf ?D. } =>{{?X a ?C. ?X a ?D} => false}. + +################################################### +# OWL differentFrom +################################################## + + +{?A owl:differentFrom ?B} => {?B owl:differentFrom ?A}. + +######################################################## +# OWL disjointWith +####################################################### + +{?A owl:disjointWith ?B. } => {{?X a ?A. ?Y a ?B }=>{?X owl:differentFrom ?Y}}. +{?A owl:disjointWith ?B. } => {{?X a ?A. ?X a ?B}=>false}. + + +########################################################### +# disjointUnionOf +######################################################### + +{?C owl:disjointUnionOf ?L. ?A list:in ?L. ?B list:in ?L. ?A log:notEqualTo ?B} => {?A owl:disjointWith ?B. ?C owl:unionOf ?L}. + +################################################## +# distinct members +################################################## +{?A owl:distinctMembers ?L. ?X list:in ?L. ?Y list:in ?L. ?X log:notEqualTo ?Y} => {?X owl:differentFrom ?Y}. + +###################################################### +# equivalent property +###################################################### +{?P owl:equivalentProperty ?Q. } => {{?S ?P ?O}=>{?S ?Q ?O}. {?S ?Q ?O} => {?S ?P ?O}}. +{?P owl:equivalentProperty ?Q} => {?Q owl:equivalentProperty ?P}. +{?P owl:equivalentProperty ?Q. ?Q owl:equivalentProperty ?R} => {?P owl:equivalentProperty ?R}. +{?P owl:equivalentProperty ?Q} => {?P rdfs:subPropertyOf ?Q. ?Q rdfs:subPropertyOf ?P}. +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:subPropertyOf ?P} => {?P owl:equivalentProperty ?Q}. 
+ +#################################################### +# functional property +#################################################### + +{?P a owl:FunctionalProperty. } => {{?S ?P ?X. ?S ?P ?Y}=>{?X owl:sameAs ?Y}}. +{?P a owl:FunctionalProperty. } =>{{?S ?P ?X. ?S ?P ?Y. ?X owl:differentFrom ?Y}=> false}. + +############################################################ +# has value +############################################################ +{?C owl:hasValue ?V. ?C owl:onProperty ?P. } => {{?U a ?C} => {?U ?P ?V}.{?U ?P ?V} => {?U a ?C}}. +{?C owl:hasValue ?V. ?C owl:onProperty ?P. ?D owl:hasValue ?V. ?D owl:onProperty ?Q. ?P rdfs:subPropertyOf ?Q} => {?C rdfs:subClassOf ?D}. +{?C owl:hasValue ?V. ?C owl:onProperty ?P. } => {{?X ?P ?Y. ?X a ?C. ?V owl:differentFrom ?Y}=> false}. + +############################################################### +# Inverse functional property +############################################################# +{?P a owl:InverseFunctionalProperty. } => {{?X ?P ?O. ?Y ?P ?O}=>{?X owl:sameAs ?Y}}. +{?P a owl:InverseFunctionalProperty. } => {{?X ?P ?O. ?Y ?P ?O. ?X owl:differentFrom ?Y}=>false}. + +################################################################ +# irreflexive property +################################################################ + +{?P a owl:IrreflexiveProperty. } => {{?X ?P ?X}=>false}. + + +################################################################ +# max cardinality +############################################################### +{?C owl:maxCardinality 1. ?C owl:onProperty ?P. } => {{?U a ?C. ?U ?P ?Y. ?U ?P ?Z}=>{?Y owl:sameAs ?Z}}. +{?C owl:maxCardinality 0. ?C owl:onProperty ?P. } => {{?X ?P ?Y. ?X a ?C}=> false}. +{?C owl:maxCardinality 1. ?C owl:onProperty ?P. } => {{?X ?P ?Y. ?X ?P ?Z. ?X a ?C. ?Z owl:differentFrom ?Y}=>false}. 
+ 
+###############################################################
+# max qualified cardinality
+###############################################################
+{?C owl:maxQualifiedCardinality 1. ?C owl:onProperty ?P. ?C owl:onClass ?D. } => {{?U a ?C. ?U ?P ?Y. ?Y a ?D. ?U ?P ?Z. ?Z a ?D}=>{?Y owl:sameAs ?Z}}.
+{?C owl:maxQualifiedCardinality 1. ?C owl:onProperty ?P. ?C owl:onClass owl:Thing. } => {{?U a ?C. ?U ?P ?Y. ?U ?P ?Z} => {?Y owl:sameAs ?Z}}.
+# note: the qualified filler class must be a distinct variable (?D); reusing ?C here
+# would only match restrictions that are their own owl:onClass (cf. OWL 2 RL cls-maxqc2)
+{?C owl:maxQualifiedCardinality 0. ?C owl:onProperty ?P. ?C owl:onClass ?D. } => {{?U a ?C. ?U ?P ?Y. ?Y a ?D}=>false}.
+{?C owl:maxQualifiedCardinality 0. ?C owl:onProperty ?P. ?C owl:onClass owl:Thing.} =>{{ ?U a ?C. ?U ?P ?Y}=> false}.
+
+##################################################################
+# one of
+################################################################
+{?C owl:oneOf ?L. ?X list:in ?L} => {?X a ?C}.
+
+##############################################################
+# property disjoint with
+##############################################################
+
+{?P owl:propertyDisjointWith ?Q. } =>{{?X ?P ?Y. ?X ?Q ?Y}=> false}.
+
+
+#################################################################
+# same as
+###############################################################
+
+{?X owl:sameAs ?Y} => {?Y owl:sameAs ?X}.
+{?X owl:sameAs ?Y. ?Y owl:sameAs ?Z} => {?X owl:sameAs ?Z}.
+{?X owl:sameAs ?Y. ?X owl:differentFrom ?Y} => false.
+
+{?X owl:sameAs ?Y. } => {{?X ?P ?O}=>{?Y ?P ?O}}.
+{?X owl:sameAs ?Y. } => {{?S ?X ?O}=>{?S ?Y ?O}}.
+{?X owl:sameAs ?Y. } => {{?S ?P ?X}=>{?S ?P ?Y}}.
+
+
+#maybe this has to go as a rule into the new file
+#{}=>
+#{
+#{?X owl:sameAs ?Y} => {?Y owl:sameAs ?X}.
+#{?X owl:sameAs ?Y. ?Y owl:sameAs ?Z} => {?X owl:sameAs ?Z}.
+#{?X owl:sameAs ?Y. ?X owl:differentFrom ?Y} => false.
+
+#{?X owl:sameAs ?Y. ?X ?P ?O} => {?Y ?P ?O}.
+#{?X owl:sameAs ?Y. ?S ?X ?O} => {?S ?Y ?O}.
+#{?X owl:sameAs ?Y. ?S ?P ?X} => {?S ?P ?Y}.
+
+#}. 
+ +############################################################### +# Symmetric and transitive property +############################################################## + +{?P a owl:SymmetricProperty. } => {{?S ?P ?O}=>{?O ?P ?S}}. +{?P a owl:TransitiveProperty. } => {{?S ?P ?X. ?X ?P ?O.}=>{?S ?P ?O}}. + +#################################################################### +# union of +################################################################### +{?C owl:unionOf ?L. ?D list:in ?L. } => {{?X a ?D}=>{?X a ?C}}. +{?C owl:unionOf ?L. ?D list:in ?L } => {?D rdfs:subClassOf ?C}. diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-triples.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-triples.n3 new file mode 100644 index 0000000..47fc208 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/instantiate-triples.n3 @@ -0,0 +1,200 @@ +PREFIX rdf: +@prefix rdfs: . +@prefix owl: . +@prefix list: . +@prefix log: . +@prefix e: . +Prefix var: +Prefix : + +#with this file we produce a new version of the ontology including additional rules for rdfs/owl-concepts + + +########################### +#rules for rdfs:subclass +######################### + + +# for pure reasoning this rule is not really necessary. It is just useful for the case that someone for example wants to specifically know all subclasses of a certain class. +{?C rdfs:subClassOf ?D. ?D rdfs:subClassOf ?E} => {?C rdfs:subClassOf ?E}. +{?C rdfs:subClassOf ?D. ?P rdfs:domain ?C} => {?P rdfs:domain ?D}. +{?C rdfs:subClassOf ?D. ?P rdfs:range ?C} => {?P rdfs:range ?D}. + + + + +############################# +# "range-rules" +############################# + + +############################# +# Adding "inverse of" rules +############################# + +{?P owl:inverseOf ?Q.}=>{ +?Q owl:inverseOf ?P +}. 
+ + +####################################### +# Intersection rules +####################################### +{?C owl:intersectionOf ?L. ?D list:in ?L} => {?C rdfs:subClassOf ?D}. + + + +######################################### +# OWL equivalent class +######################################### + +{?A owl:equivalentClass ?B} => {?B owl:equivalentClass ?A}. +{?A owl:equivalentClass ?B. ?B owl:equivalentClass ?C} => {?A owl:equivalentClass ?C}. +{?A owl:equivalentClass ?B} => {?A rdfs:subClassOf ?B. ?B rdfs:subClassOf ?A}. +{?A rdfs:subClassOf ?B. ?B rdfs:subClassOf ?A} => {?A owl:equivalentClass ?B}. + + +###################################################### +# OWL some values from +#################################################### + +{?C owl:someValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:someValuesFrom ?Z. ?D owl:onProperty ?P. ?Y rdfs:subClassOf ?Z} => {?C rdfs:subClassOf ?D}. +{?C owl:someValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:someValuesFrom ?Y. ?D owl:onProperty ?Q. ?P rdfs:subPropertyOf ?Q} => {?C rdfs:subClassOf ?D}. + + +#################################################### +# rdfs subproperty +###################################################### + +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:subPropertyOf ?R} => {?P rdfs:subPropertyOf ?R}. +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:domain ?C} => {?P rdfs:domain ?C}. +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:range ?C} => {?P rdfs:range ?C}. + + +######################################################### +# OWL all different +####################################################### + +#open for later +{?A a owl:AllDifferent. ?A owl:members ?L. ?X list:in ?L. ?Y list:in ?L. ?X log:notEqualTo ?Y. ?X owl:sameAs ?Y} => false. +{?A a owl:AllDifferent. ?A owl:distinctMembers ?L. ?X list:in ?L. ?Y list:in ?L. ?X log:notEqualTo ?Y. ?X owl:sameAs ?Y} => false. 
+ + +######################################################## +# OWL all disjoint +######################################################## + + + + +############################################################## +# OWL all disjoint properties +############################################################## + + +############################################################## +# OWL allValuesFrom +############################################################# + +{?C owl:allValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:allValuesFrom ?Z. ?D owl:onProperty ?P. ?Y rdfs:subClassOf ?Z} => {?C rdfs:subClassOf ?D}. +{?C owl:allValuesFrom ?Y. ?C owl:onProperty ?P. ?D owl:allValuesFrom ?Y. ?D owl:onProperty ?Q. ?P rdfs:subPropertyOf ?Q} => {?D rdfs:subClassOf ?C}. + +############################################################# +# OWL assymetric property +############################################################# + + +####################################################### +# OWL complementof +##################################################### + + +{?C owl:complementOf ?D} => {?D owl:complementOf ?C}. +{?C owl:complementOf ?D} => {?D owl:disjointWith ?C}. + +################################################### +# OWL differentFrom +################################################## + + +{?A owl:differentFrom ?B} => {?B owl:differentFrom ?A}. + +######################################################## +# OWL disjointWith +####################################################### + + +########################################################### +# disjointUnionOf +######################################################### + +{?C owl:disjointUnionOf ?L. ?A list:in ?L. ?B list:in ?L. ?A log:notEqualTo ?B} => {?A owl:disjointWith ?B. ?C owl:unionOf ?L}. + +################################################## +# distinct members +################################################## +{?A owl:distinctMembers ?L. ?X list:in ?L. ?Y list:in ?L. ?X log:notEqualTo ?Y} => {?X owl:differentFrom ?Y}. 
+ +###################################################### +# equivalent property +###################################################### +{?P owl:equivalentProperty ?Q} => {?Q owl:equivalentProperty ?P}. +{?P owl:equivalentProperty ?Q. ?Q owl:equivalentProperty ?R} => {?P owl:equivalentProperty ?R}. +{?P owl:equivalentProperty ?Q} => {?P rdfs:subPropertyOf ?Q. ?Q rdfs:subPropertyOf ?P}. +{?P rdfs:subPropertyOf ?Q. ?Q rdfs:subPropertyOf ?P} => {?P owl:equivalentProperty ?Q}. + +#################################################### +# functional property +#################################################### + + +############################################################ +# has value +############################################################ +{?C owl:hasValue ?V. ?C owl:onProperty ?P. ?D owl:hasValue ?V. ?D owl:onProperty ?Q. ?P rdfs:subPropertyOf ?Q} => {?C rdfs:subClassOf ?D}. + +############################################################### +# Inverse functional property +############################################################# + +################################################################ +# irreflexive property +################################################################ + + + +################################################################ +# max cardinality +############################################################### + +############################################################### +# max qualified cardinality +############################################################### + +################################################################## +# one of +################################################################ +{?C owl:oneOf ?L. ?X list:in ?L} => {?X a ?C}. 
+ +############################################################## +# property disjoint with +############################################################## + + +################################################################# +# same as +############################################################### + +{?X owl:sameAs ?Y} => {?Y owl:sameAs ?X}. +{?X owl:sameAs ?Y. ?Y owl:sameAs ?Z} => {?X owl:sameAs ?Z}. +{?X owl:sameAs ?Y. ?X owl:differentFrom ?Y} => false. + + +############################################################### +# Symmetric and transitive property +############################################################## + +#################################################################### +# union of +################################################################### +{?C owl:unionOf ?L. ?D list:in ?L } => {?D rdfs:subClassOf ?C}. diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/lists.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/lists.n3 new file mode 100644 index 0000000..50aa06b --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/preprocessing/lists.n3 @@ -0,0 +1,42 @@ +@prefix log: . +@prefix owl: . +@prefix rdfs: . +@prefix rdf: . +@prefix list: . +@prefix e: . +@prefix : . + + +#handling of lists + +{ +?x ?p ?o. +?x a rdf:List. +?x :makeList ?list. + +} +=> +{?list ?p ?o}. + +{ +?s ?p ?x. +?x a rdf:List. +?x :makeList ?list. + +} +=> +{?s ?p ?list}. + + +{ rdf:nil :makeList () } <={}. + +{?x :makeList ?list} +<= +{ +?x rdf:first ?fst. +?x rdf:rest ?rst. + +?rst :makeList ?lst. + +((?fst) ?lst) list:append ?list. +}. 
diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-dynamic-window-parameter-substitution-goal.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-dynamic-window-parameter-substitution-goal.n3 new file mode 100644 index 0000000..dadfabd --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-dynamic-window-parameter-substitution-goal.n3 @@ -0,0 +1,57 @@ +@prefix : . + +@prefix sd: . +@prefix sh: . + +@prefix owl: . +@prefix rdf: . +@prefix xsd: . +@prefix ssn: . +@prefix rdfs: . +@prefix sosa: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . +@prefix math: . + +# retrieve instantiated SHACL queries (only the preprocessed ones) +{ + ?q rdf:type sd:SubstitutedQuery ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes . +} +=> +{ + ?q rdf:type sd:Query ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes . +} . + +# retrieve SHACL prefixes (used in SHACL queries) +{ + ?prefixes rdf:type owl:Ontology . + + ?prefixes sh:declare ?prefix . + ?prefix ?p ?o . +} +=> +{ + ?prefixes rdf:type owl:Ontology . + + ?prefixes sh:declare ?prefix . + ?prefix ?p ?o . +} . + +# retrieve static window parameters for further substitution +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:originalQuery ?oq . + + ?oq sd:staticWindowParameters ?wp . +} +=> +{ + ?iq sd:staticWindowParameters ?wp . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-dynamic-window-parameter-substitution-rules.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-dynamic-window-parameter-substitution-rules.n3 new file mode 100644 index 0000000..6e5d0d1 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-dynamic-window-parameter-substitution-rules.n3 @@ -0,0 +1,249 @@ +@prefix : . + +@prefix sd: . +@prefix sd-window: . +@prefix sh: . + +@prefix owl: . 
+@prefix rdf: . +@prefix xsd: . +@prefix ssn: . +@prefix rdfs: . +@prefix sosa: . +@prefix time: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . +@prefix math: . + + +# first rule to start substitution +# -> for both the context change & monitor window parameter substitution trigger +################################################################################# + +{ + ?q rdf:type sd:Query ; + sd:queryBody ?qb ; + sd:dynamicWindowParameters ?wp ; + sh:prefixes ?prefixes . + + ?st sd:windowParameterSubstitutionTrigger sd:ContextChange . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?q ; + sd:queryBody ?qb ; + sd:windowParameters ?wp ; + sh:prefixes ?prefixes . +} . + +{ + ?q rdf:type sd:Query ; + sd:queryBody ?qb ; + sh:prefixes ?prefixes ; + sd:pattern ?p . + ?p sd:correctedWindowParameters ?wp . + + ?st sd:windowParameterSubstitutionTrigger sd:Monitor . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?q ; + sd:queryBody ?qb ; + sd:windowParameters ?wp ; + sh:prefixes ?prefixes . +} . + + +# rules to substitute literals (object of log:rawType property is rdfs:Literal) +################################################################################ + +# rule to substitute xsd:duration strings +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingWindowParameters) list:append ?vs . + ?v sd-window:type xsd:duration ; + sd-window:value ?value ; + sd-window:variable ?variable . + + ("?{" ?variable "}") string:concatenation ?variable_new . + (?s ?variable_new ?value) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:windowParameters ?remainingWindowParameters ; + sh:prefixes ?prefixes . +} . 
+ +# rules to set units for numbers with a defined unit type (seconds, minutes or hours) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:windowParameters ?windowParameters . + + ?windowParameters list:first ?param . + ?param sd-window:type time:seconds . +} +=> +{ + ?param sd-window:unit "S" . +} . +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:windowParameters ?windowParameters . + + ?windowParameters list:first ?param . + ?param sd-window:type time:minutes . +} +=> +{ + ?param sd-window:unit "M" . +} . +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:windowParameters ?windowParameters . + + ?windowParameters list:first ?param . + ?param sd-window:type time:hours . +} +=> +{ + ?param sd-window:unit "H" . +} . + +# rule to substitute numbers with a given unit (seconds, minutes, or hours) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingWindowParameters) list:append ?vs . + ?v sd-window:unit ?unit ; + sd-window:value ?value ; + sd-window:variable ?variable . + + ("?{" ?variable "}") string:concatenation ?variable_new . + ("PT" ?value ?unit) string:concatenation ?value_new . + (?s ?variable_new ?value_new) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:windowParameters ?remainingWindowParameters ; + sh:prefixes ?prefixes . +} . + + +# rule to handle input variables that do not occur in the stream window definitions +#################################################################################### + +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingWindowParameters) list:append ?vs . + ?v sd-window:variable ?variable . + + (?variable "?" 
"") string:replace ?variable_name . + ("\\?\\{" ?variable_name "\\}") string:concatenation ?variable_escaped . + ?s string:notMatches ?variable_escaped . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:windowParameters ?remainingWindowParameters ; + sh:prefixes ?prefixes . +} . + + +# final formatting rules +######################### + +# replace newlines by spaces +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + (?s "\n" " ") string:replace ?s_new . + ?s_new string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_new ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . +} . + +# trim double spaces +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + (?s " " " ") string:replace ?s_new . + ?s_new string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_new ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . +} . + +# retrieve instantiated SHACL queries (only the preprocessed ones) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + ?s_replaced string:notMatches "\\s\\s" . + ?s_replaced string:notMatches "\n" . +} +=> +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes . +} . 
diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-extraction-goal.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-extraction-goal.n3 new file mode 100644 index 0000000..aa0b47b --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-extraction-goal.n3 @@ -0,0 +1,29 @@ +@prefix sd: . +@prefix sh: . + +@prefix rdfs: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . + +# extract query from proof +# -> including input variables and the static window parameters +{ + ?x a r:Inference ; + r:gives ?graph . + ?graph log:includes { + ?q a sd:Query . + ?q sd:inputVariables ?iv . + ?q sd:windowParameters ?wp . + ?q sd:pattern ?p . + } . +} +=> +{ + ?x a sd:Query . + ?x sd:inputVariables ?iv . + ?x sd:staticWindowParameters ?wp . + ?x sd:pattern ?p . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-goal.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-goal.n3 new file mode 100644 index 0000000..0a77c8d --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-goal.n3 @@ -0,0 +1,85 @@ +@prefix : . + +@prefix sd: . +@prefix sd-window: . +@prefix sh: . + +@prefix owl: . +@prefix rdf: . +@prefix xsd: . +@prefix ssn: . +@prefix rdfs: . +@prefix sosa: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . +@prefix math: . + +# retrieve instantiated SHACL queries +# -> only the preprocessed ones, i.e. the ones being the end of the substitution +# process defined in the rules file +{ + ?q rdf:type sd:SubstitutedQuery ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes ; + sd:pattern ?p . +} +=> +{ + ?q rdf:type sd:Query ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes ; + sd:pattern ?p . +} . 
+ +# retrieve SHACL prefixes (used in SHACL queries) +{ + ?prefixes rdf:type owl:Ontology . + + ?prefixes sh:declare ?prefix . + ?prefix ?p ?o . +} +=> +{ + ?prefixes rdf:type owl:Ontology . + + ?prefixes sh:declare ?prefix . + ?prefix ?p ?o . +} . + +# retrieve static & dynamic window parameters for further substitution +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:originalQuery ?oq . + + ?oq sd:staticWindowParameters ?wp . +} +=> +{ + ?iq sd:staticWindowParameters ?wp . +} . + +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:originalQuery ?oq . + + ?oq sd:dynamicWindowParameters ?wp . +} +=> +{ + ?iq sd:dynamicWindowParameters ?wp . +} . +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:originalQuery ?oq . + + ?oq sd:dynamicWindowParameters ?wp . + ?item list:in ?wp . + ?item ?property ?object . +} +=> +{ + ?item ?property ?object . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-rules.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-rules.n3 new file mode 100644 index 0000000..691c802 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-rules.n3 @@ -0,0 +1,537 @@ +@prefix : . + +@prefix sd: . +@prefix sh: . + +@prefix owl: . +@prefix rdf: . +@prefix xsd: . +@prefix ssn: . +@prefix rdfs: . +@prefix sosa: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . +@prefix math: . + + +# rules to unify all query types +################################# + +{ + ?a sh:construct ?b . +} +=> +{ + ?a sd:queryBody ?b . +} . + +{ + ?a sh:select ?b . +} +=> +{ + ?a sd:queryBody ?b . +} . + +{ + ?a sh:ask ?b . +} +=> +{ + ?a sd:queryBody ?b . +} . + +{ + ?a sh:describe ?b . +} +=> +{ + ?a sd:queryBody ?b . +} . 
+ + +# first rule to start substitution +################################### + +{ + ?q rdf:type sd:Query ; + sd:pattern ?p ; + sd:inputVariables ?v ; + sd:pattern [ sh:prefixes ?prefixes ] . + ?p sd:queryBody ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?q ; + sd:queryBody ?s ; + sd:inputVariables ?v ; + sd:prefixes ?prefixes . +} . + + +# rules to substitute literals (object of log:rawType property is rdfs:Literal) +################################################################################ + +# rule to substitute xsd:string literals +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + + ?item_2 log:rawType rdfs:Literal . + (?item_2) string:concatenation ?item_2_str . + (?item_2_str xsd:string) log:dtlit ?item_2_dt . + ?item_2_dt log:equalTo ?item_2 . + + ("\"" ?item_2 "\"") string:concatenation ?item_2_new . + (?s ?item_1 ?item_2_new) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:inputVariables ?remainingInputVariables ; + sd:prefixes ?prefixes . +} . + +# rule for any supported datatype except the ones for which EYE removes the datatype +# annotation (xsd:string, xsd:integer, xsd:double, xsd:decimal, xsd:boolean) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + sd:SupportedDataTypesList sd:contains ?dataTypes . + ?dataTypes list:member ?dataType . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + + ?item_2 log:rawType rdfs:Literal . + (?item_2) string:concatenation ?item_2_str . 
+ (?item_2_str ?dataType) log:dtlit ?item_2_dt . + ?item_2_dt log:equalTo ?item_2 . + + ("\"" ?item_2 "\"^^" ?dataType) string:concatenation ?item_2_new . + (?s ?item_1 ?item_2_new) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:inputVariables ?remainingInputVariables ; + sd:prefixes ?prefixes . +} . + +# rule 1 to substitute datatypes for which EYE removes the datatype annotation, +# except xsd:string (xsd:integer, xsd:double, xsd:decimal, xsd:boolean) +# -> this rule checks for all default supported datatypes that the variable +# to be replaced is not of this type +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + + ?item_2 log:rawType rdfs:Literal . + (?item_2) string:concatenation ?item_2_str . + + (?item_2_str xsd:string) log:dtlit ?item_2_dt0 . + ?item_2_dt0 log:notEqualTo ?item_2 . + + (?item_2_str xsd:float) log:dtlit ?item_2_dt1 . + ?item_2_dt1 log:notEqualTo ?item_2 . + + (?item_2_str xsd:duration) log:dtlit ?item_2_dt2 . + ?item_2_dt2 log:notEqualTo ?item_2 . + + (?item_2_str xsd:dateTime) log:dtlit ?item_2_dt3 . + ?item_2_dt3 log:notEqualTo ?item_2 . + + (?item_2_str xsd:time) log:dtlit ?item_2_dt4 . + ?item_2_dt4 log:notEqualTo ?item_2 . + + (?item_2_str xsd:date) log:dtlit ?item_2_dt5 . + ?item_2_dt5 log:notEqualTo ?item_2 . + + (?item_2_str xsd:gYearMonth) log:dtlit ?item_2_dt6 . + ?item_2_dt6 log:notEqualTo ?item_2 . + + (?item_2_str xsd:gYear) log:dtlit ?item_2_dt7 . + ?item_2_dt7 log:notEqualTo ?item_2 . + + (?item_2_str xsd:gMonthDay) log:dtlit ?item_2_dt8 . + ?item_2_dt8 log:notEqualTo ?item_2 . + + (?item_2_str xsd:gDay) log:dtlit ?item_2_dt9 . + ?item_2_dt9 log:notEqualTo ?item_2 . 
+ + (?item_2_str xsd:gMonth) log:dtlit ?item_2_dt10 . + ?item_2_dt10 log:notEqualTo ?item_2 . + + (?item_2_str xsd:hexBinary) log:dtlit ?item_2_dt11 . + ?item_2_dt11 log:notEqualTo ?item_2 . + + (?item_2_str xsd:base64Binary) log:dtlit ?item_2_dt12 . + ?item_2_dt12 log:notEqualTo ?item_2 . + + (?item_2_str xsd:anyURI) log:dtlit ?item_2_dt13 . + ?item_2_dt13 log:notEqualTo ?item_2 . + + (?item_2_str xsd:QName) log:dtlit ?item_2_dt14 . + ?item_2_dt14 log:notEqualTo ?item_2 . + + (?item_2_str xsd:NOTATION) log:dtlit ?item_2_dt15 . + ?item_2_dt15 log:notEqualTo ?item_2 . + + (?item_2_str xsd:normalizedString) log:dtlit ?item_2_dt16 . + ?item_2_dt16 log:notEqualTo ?item_2 . + + (?item_2_str xsd:token) log:dtlit ?item_2_dt17 . + ?item_2_dt17 log:notEqualTo ?item_2 . + + (?item_2_str xsd:language) log:dtlit ?item_2_dt18 . + ?item_2_dt18 log:notEqualTo ?item_2 . + + (?item_2_str xsd:NMTOKEN) log:dtlit ?item_2_dt19 . + ?item_2_dt19 log:notEqualTo ?item_2 . + + (?item_2_str xsd:NMTOKENS) log:dtlit ?item_2_dt20 . + ?item_2_dt20 log:notEqualTo ?item_2 . + + (?item_2_str xsd:Name) log:dtlit ?item_2_dt21 . + ?item_2_dt21 log:notEqualTo ?item_2 . + + (?item_2_str xsd:NCName) log:dtlit ?item_2_dt22 . + ?item_2_dt22 log:notEqualTo ?item_2 . + + (?item_2_str xsd:ID) log:dtlit ?item_2_dt23 . + ?item_2_dt23 log:notEqualTo ?item_2 . + + (?item_2_str xsd:IDREF) log:dtlit ?item_2_dt24 . + ?item_2_dt24 log:notEqualTo ?item_2 . + + (?item_2_str xsd:IDREFS) log:dtlit ?item_2_dt25 . + ?item_2_dt25 log:notEqualTo ?item_2 . + + (?item_2_str xsd:ENTITY) log:dtlit ?item_2_dt26 . + ?item_2_dt26 log:notEqualTo ?item_2 . + + (?item_2_str xsd:ENTITIES) log:dtlit ?item_2_dt27 . + ?item_2_dt27 log:notEqualTo ?item_2 . + + (?item_2_str xsd:integer) log:dtlit ?item_2_dt28 . + ?item_2_dt28 log:notEqualTo ?item_2 . + + (?item_2_str xsd:nonPositiveInteger) log:dtlit ?item_2_dt29 . + ?item_2_dt29 log:notEqualTo ?item_2 . + + (?item_2_str xsd:negativeInteger) log:dtlit ?item_2_dt30 . 
+ ?item_2_dt30 log:notEqualTo ?item_2 . + + (?item_2_str xsd:long) log:dtlit ?item_2_dt31 . + ?item_2_dt31 log:notEqualTo ?item_2 . + + (?item_2_str xsd:int) log:dtlit ?item_2_dt32 . + ?item_2_dt32 log:notEqualTo ?item_2 . + + (?item_2_str xsd:short) log:dtlit ?item_2_dt33 . + ?item_2_dt33 log:notEqualTo ?item_2 . + + (?item_2_str xsd:byte) log:dtlit ?item_2_dt34 . + ?item_2_dt34 log:notEqualTo ?item_2 . + + (?item_2_str xsd:nonNegativeInteger) log:dtlit ?item_2_dt35 . + ?item_2_dt35 log:notEqualTo ?item_2 . + + (?item_2_str xsd:unsignedLong) log:dtlit ?item_2_dt36 . + ?item_2_dt36 log:notEqualTo ?item_2 . + + (?item_2_str xsd:unsignedInt) log:dtlit ?item_2_dt37 . + ?item_2_dt37 log:notEqualTo ?item_2 . + + (?item_2_str xsd:unsignedShort) log:dtlit ?item_2_dt38 . + ?item_2_dt38 log:notEqualTo ?item_2 . + + (?item_2_str xsd:unsignedByte) log:dtlit ?item_2_dt39 . + ?item_2_dt39 log:notEqualTo ?item_2 . + + (?item_2_str xsd:positiveInteger) log:dtlit ?item_2_dt40 . + ?item_2_dt40 log:notEqualTo ?item_2 . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes ; + sd:inStep1OfShortenedDataTypeReplacement ?v . +} . + +# rule 2 to substitute datatypes for which EYE removes the datatype annotation, +# except xsd:string (xsd:integer, xsd:double, xsd:decimal, xsd:boolean) +# -> this rule initializes the triples to check the extra supported datatypes +# in the next step +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes ; + sd:inStep1OfShortenedDataTypeReplacement ?v . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + ?item_2 log:rawType rdfs:Literal . + + sd:ExtraSupportedDataTypesList sd:contains ?dataTypes . 
+} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes ; + sd:inStep2OfShortenedDataTypeReplacement [ + sd:involvedListItem ?v ; + sd:remainingDataTypesToCheck ?dataTypes + ] . +} . + +# rule 3 to substitute datatypes for which EYE removes the datatype annotation, +# except xsd:string (xsd:integer, xsd:double, xsd:decimal, xsd:boolean) +# -> this rule does the actual check for all extra supported datatypes that the +# variable to be replaced is not of this type +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes ; + sd:inStep2OfShortenedDataTypeReplacement [ + sd:involvedListItem ?v ; + sd:remainingDataTypesToCheck ?dataTypes + ] . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + ?item_2 log:rawType rdfs:Literal . + + ?dataTypes list:first ?dt . + ((?dt) ?remainingDataTypes) list:append ?dataTypes . + + (?item_2) string:concatenation ?item_2_str . + (?item_2_str ?dt) log:dtlit ?item_2_dt . + ?item_2_dt log:notEqualTo ?item_2 . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes ; + sd:inStep2OfShortenedDataTypeReplacement [ + sd:involvedListItem ?v ; + sd:remainingDataTypesToCheck ?remainingDataTypes ; + sd:dataTypesToCheck ?dataTypes + ] . +} . 
+ +# rule 4 to substitute datatypes for which EYE removes the datatype annotation, +# except xsd:string (xsd:integer, xsd:double, xsd:decimal, xsd:boolean) +# -> this rule does the actual substitution of the variable as a raw variable +# without any quotes or datatype annotation (since it is not of any of the +# default or extra defined datatypes) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes ; + sd:inStep2OfShortenedDataTypeReplacement [ + sd:involvedListItem ?v ; + sd:remainingDataTypesToCheck ?dataTypes + ] . + + ?dataTypes math:memberCount 0 . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + ?item_2 log:rawType rdfs:Literal . + + (?item_2) string:concatenation ?item_2_str . + (?s ?item_1 ?item_2_str) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:inputVariables ?remainingInputVariables ; + sd:prefixes ?prefixes . +} . + + +# rule to substitute URIs (object of log:rawType property is log:Other) +######################################################################## + +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 ; + list:last ?item_2 . + + ?item_2 log:rawType log:Other ; + log:uri ?item_2_uri . + ("<" ?item_2_uri ">") string:concatenation ?item_2_new . + + (?s ?item_1 ?item_2_new) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:inputVariables ?remainingInputVariables ; + sd:prefixes ?prefixes . +} . 
+ + +# rule to handle input variables that do not occur in the query pattern body +############################################################################# + +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingInputVariables) list:append ?vs . + ?v rdf:first ?item_1 . + + (?item_1 "?" "\\?") string:replace ?item_1_escaped . + ?s string:notMatches ?item_1_escaped . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?remainingInputVariables ; + sd:prefixes ?prefixes . +} . + + +# final formatting rules +######################### + +# replace newlines by spaces +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + (?s "\n" " ") string:replace ?s_new . + ?s_new string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_new ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . +} . + +# trim double spaces +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + (?s " " " ") string:replace ?s_new . + ?s_new string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_new ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . +} . + +# retrieve instantiated SHACL queries (only the preprocessed ones) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:originalQuery ?origq ; + sd:queryBody ?s_replaced ; + sd:inputVariables ?vs ; + sd:prefixes ?prefixes . + + ?origq sd:pattern ?p . + + ?vs math:memberCount 0 . + + ?s_replaced string:notMatches "\\s\\s" . + ?s_replaced string:notMatches "\n" . 
+} +=> +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes ; + sd:pattern ?p . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-supported-datatypes.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-supported-datatypes.n3 new file mode 100644 index 0000000..3049f60 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-input-variable-substitution-supported-datatypes.n3 @@ -0,0 +1,55 @@ +@prefix : . +@prefix xsd: . +@prefix list: . + +:DefaultSupportedDataTypesList :contains (xsd:float + xsd:nonPositiveInteger + xsd:negativeInteger + xsd:long + xsd:int + xsd:short + xsd:byte + xsd:language + xsd:duration + xsd:dateTime + xsd:time + xsd:anyURI + xsd:date + xsd:nonNegativeInteger + xsd:unsignedLong + xsd:unsignedInt + xsd:unsignedShort + xsd:unsignedByte + xsd:positiveInteger + xsd:gYearMonth + xsd:gYear + xsd:gMonthDay + xsd:gDay + xsd:gMonth + xsd:hexBinary + xsd:base64Binary + xsd:QName + xsd:NOTATION + xsd:normalizedString + xsd:token + xsd:NMTOKEN + xsd:NMTOKENS + xsd:Name + xsd:NCName + xsd:ID + xsd:IDREF + xsd:IDREFS + xsd:ENTITY + xsd:ENTITIES) . + +:ExtraSupportedDataTypesList :contains () . + +{ + :DefaultSupportedDataTypesList :contains ?a . + :ExtraSupportedDataTypesList :contains ?b . + (?a ?b) list:append ?c . +} +=> +{ + :SupportedDataTypesList :contains ?c . +} . 
\ No newline at end of file diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-static-window-parameter-substitution-goal.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-static-window-parameter-substitution-goal.n3 new file mode 100644 index 0000000..76b29fd --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-static-window-parameter-substitution-goal.n3 @@ -0,0 +1,45 @@ +@prefix : . + +@prefix sd: . +@prefix sh: . + +@prefix owl: . +@prefix rdf: . +@prefix xsd: . +@prefix ssn: . +@prefix rdfs: . +@prefix sosa: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . +@prefix math: . + +# retrieve instantiated SHACL queries (only the preprocessed ones) +{ + ?q rdf:type sd:SubstitutedQuery ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes . +} +=> +{ + ?q rdf:type sd:Query ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes . +} . + +# retrieve SHACL prefixes (used in SHACL queries) +{ + ?prefixes rdf:type owl:Ontology . + + ?prefixes sh:declare ?prefix . + ?prefix ?p ?o . +} +=> +{ + ?prefixes rdf:type owl:Ontology . + + ?prefixes sh:declare ?prefix . + ?prefix ?p ?o . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-static-window-parameter-substitution-rules.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-static-window-parameter-substitution-rules.n3 new file mode 100644 index 0000000..89affd3 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/query-static-window-parameter-substitution-rules.n3 @@ -0,0 +1,220 @@ +@prefix : . + +@prefix sd: . +@prefix sd-window: . +@prefix sh: . + +@prefix owl: . +@prefix rdf: . +@prefix xsd: . +@prefix ssn: . +@prefix rdfs: . +@prefix sosa: . +@prefix time: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . +@prefix math: . 
+ + +# first rule to start substitution +################################### + +{ + ?q rdf:type sd:Query ; + sd:queryBody ?qb ; + sd:staticWindowParameters ?wp ; + sh:prefixes ?prefixes . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:queryBody ?qb ; + sd:windowParameters ?wp ; + sh:prefixes ?prefixes . +} . + + +# rules to substitute literals (object of log:rawType property is rdfs:Literal) +################################################################################ + +# rule to substitute xsd:duration strings +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingWindowParameters) list:append ?vs . + ?v list:first ?variable . + ((?item_1) ?other_items) list:append ?v . + ?other_items list:first ?value ; + list:last xsd:duration . + + (?variable "?" "") string:replace ?variable_name . + ("?{" ?variable_name "}") string:concatenation ?variable_new . + (?s ?variable_new ?value) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s_replaced ; + sd:windowParameters ?remainingWindowParameters ; + sh:prefixes ?prefixes . +} . + +# rules to set units for numbers with a defined unit type (seconds, minutes or hours) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:windowParameters ?windowParameters . + + ?windowParameters list:first ?param . + ?param list:last time:seconds . +} +=> +{ + ?param sd-window:unit "S" . +} . +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:windowParameters ?windowParameters . + + ?windowParameters list:first ?param . + ?param list:last time:minutes . +} +=> +{ + ?param sd-window:unit "M" . +} . +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:windowParameters ?windowParameters . + + ?windowParameters list:first ?param . + ?param list:last time:hours . +} +=> +{ + ?param sd-window:unit "H" . +} . 
+ +# rule to substitute numbers with a given unit (seconds, minutes, or hours) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingWindowParameters) list:append ?vs . + ?v list:first ?variable . + ((?item_1) ?other_items) list:append ?v . + ?other_items list:first ?value . + ?v sd-window:unit ?unit . + + (?variable "?" "") string:replace ?variable_name . + ("?{" ?variable_name "}") string:concatenation ?variable_new . + ("PT" ?value ?unit) string:concatenation ?value_new . + (?s ?variable_new ?value_new) string:replace ?s_replaced . + ?s_replaced string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s_replaced ; + sd:windowParameters ?remainingWindowParameters ; + sh:prefixes ?prefixes . +} . + + +# rule to handle input variables that do not occur in the stream window definitions +#################################################################################### + +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs list:first ?v . + ((?v) ?remainingWindowParameters) list:append ?vs . + ?v rdf:first ?variable . + + (?variable "?" "") string:replace ?variable_name . + ("\\?\\{" ?variable_name "\\}") string:concatenation ?variable_escaped . + ?s string:notMatches ?variable_escaped . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s ; + sd:windowParameters ?remainingWindowParameters ; + sh:prefixes ?prefixes . +} . + + +# final formatting rules +######################### + +# replace newlines by spaces +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + (?s "\n" " ") string:replace ?s_new . + ?s_new string:notEqualIgnoringCase ?s . 
+} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s_new ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . +} . + +# trim double spaces +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + (?s " " " ") string:replace ?s_new . + ?s_new string:notEqualIgnoringCase ?s . +} +=> +{ + _:x rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s_new ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . +} . + +# retrieve instantiated SHACL queries (only the preprocessed ones) +{ + ?iq rdf:type sd:InstantiatedQuery ; + sd:queryBody ?s_replaced ; + sd:windowParameters ?vs ; + sh:prefixes ?prefixes . + + ?vs math:memberCount 0 . + + ?s_replaced string:notMatches "\\s\\s" . + ?s_replaced string:notMatches "\n" . +} +=> +{ + ?iq rdf:type sd:SubstitutedQuery ; + sd:queryBody ?s_replaced ; + sh:prefixes ?prefixes . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/trigger/trigger-context-change.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/trigger/trigger-context-change.n3 new file mode 100644 index 0000000..88ea5ca --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/trigger/trigger-context-change.n3 @@ -0,0 +1,3 @@ +@prefix sd: . + +_:trigger sd:windowParameterSubstitutionTrigger sd:ContextChange . diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/trigger/trigger-monitor.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/trigger/trigger-monitor.n3 new file mode 100644 index 0000000..2a82ddf --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/trigger/trigger-monitor.n3 @@ -0,0 +1,3 @@ +@prefix sd: . + +_:trigger sd:windowParameterSubstitutionTrigger sd:Monitor . 
diff --git a/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/window-parameter-extraction-goal.n3 b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/window-parameter-extraction-goal.n3 new file mode 100644 index 0000000..ace3162 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/eye/n3/query-derivation/window-parameter-extraction-goal.n3 @@ -0,0 +1,44 @@ +@prefix sd: . +@prefix sh: . + +@prefix rdfs: . + +@prefix r: . +@prefix log: . +@prefix string: . +@prefix list: . + +# extract dynamic window parameters associated to queries occurring in proof (rule 1) +# -> also extract all triples with a window parameter as subject (rule 2) +{ + ?x a r:Inference ; + r:gives ?graph . + ?graph log:includes { + ?q a sd:Query . + ?q sd:inputVariables ?v . + ?q sd:pattern ?p . + } . + + ?p sd:windowParameters ?windowParameters . +} +=> +{ + ?x sd:dynamicWindowParameters ?windowParameters . +} . +{ + ?x a r:Inference ; + r:gives ?graph . + ?graph log:includes { + ?q a sd:Query . + ?q sd:inputVariables ?iv . + ?q sd:pattern ?p . + } . + + ?p sd:windowParameters ?wp . + ?item list:in ?wp . + ?item ?property ?object . +} +=> +{ + ?item ?property ?object . +} . diff --git a/src/divide-central/divide-eye/src/main/resources/sparql/prepare-context-for-query-derivation-initial.query b/src/divide-central/divide-eye/src/main/resources/sparql/prepare-context-for-query-derivation-initial.query new file mode 100644 index 0000000..2c111e4 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/sparql/prepare-context-for-query-derivation-initial.query @@ -0,0 +1,11 @@ +PREFIX sd: +PREFIX sd-query: + +INSERT { + sd-query:pattern sd:windowParameters () . +} +WHERE { + FILTER NOT EXISTS { + sd-query:pattern sd:windowParameters ?wp . 
+ } +} diff --git a/src/divide-central/divide-eye/src/main/resources/sparql/prepare-context-for-query-derivation-loop.query b/src/divide-central/divide-eye/src/main/resources/sparql/prepare-context-for-query-derivation-loop.query new file mode 100644 index 0000000..6e6fa60 --- /dev/null +++ b/src/divide-central/divide-eye/src/main/resources/sparql/prepare-context-for-query-derivation-loop.query @@ -0,0 +1,21 @@ +PREFIX rdf: +PREFIX sd: +PREFIX sd-query: +PREFIX sd-window: + +DELETE { + sd-query:pattern sd:windowParameters ?wps ; + sd-window:parameter ?wp . +} +INSERT { + sd-query:pattern sd:windowParameters [ rdf:first ?wp ; + rdf:rest ?wps ] . +} +WHERE { + SELECT ?wps ?wp + WHERE { + sd-query:pattern sd:windowParameters ?wps ; + sd-window:parameter ?wp . + } + LIMIT 1 +} diff --git a/src/divide-central/divide-monitor/pom.xml b/src/divide-central/divide-monitor/pom.xml new file mode 100644 index 0000000..f8bf9de --- /dev/null +++ b/src/divide-central/divide-monitor/pom.xml @@ -0,0 +1,17 @@ + + + 4.0.0 + + + be.ugent.idlab + divide + 1.0 + + + divide-monitor + 1.0 + + + \ No newline at end of file diff --git a/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/Monitor.java b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/Monitor.java new file mode 100644 index 0000000..62ed55d --- /dev/null +++ b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/Monitor.java @@ -0,0 +1,9 @@ +package be.ugent.idlab.divide.monitor.interfaces; + +import be.ugent.idlab.divide.monitor.interfaces.rspengine.IRspEngineMonitor; + +public interface Monitor { + + IRspEngineMonitor getRspEngineMonitor(); + +} diff --git a/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/IRspEngineMonitor.java b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/IRspEngineMonitor.java new file mode 
100644 index 0000000..f32499d --- /dev/null +++ b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/IRspEngineMonitor.java @@ -0,0 +1,21 @@ +package be.ugent.idlab.divide.monitor.interfaces.rspengine; + +/** + * Monitor of an individual RSP engine, offering callback methods in its interface + * via which an RSP engine can publish any monitoring information. + */ +public interface IRspEngineMonitor { + + /** + * Report the end of execution of a given query, containing different metrics + * about the ended query execution. + * + * @param queryId ID of the query running on the RSP engine of which the execution has ended + * @param executionTimeMs execution time of the query in milliseconds + * @param memoryUsageMb memory usage of the query in megabytes + */ + void finishQueryExecution(String queryId, + long executionTimeMs, + double memoryUsageMb); + +} diff --git a/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/RspEngineMonitor.java b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/RspEngineMonitor.java new file mode 100644 index 0000000..66174b0 --- /dev/null +++ b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/RspEngineMonitor.java @@ -0,0 +1,11 @@ +package be.ugent.idlab.divide.monitor.interfaces.rspengine; + +class RspEngineMonitor implements IRspEngineMonitor { + + @Override + public void finishQueryExecution(String queryId, long executionTimeMs, double memoryUsageMb) { + // TODO MONITOR: 06/08/2020 implement + // send execution time, memory usage & corresponding query ID to monitor stream + } + +} diff --git a/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/RspEngineMonitorFactory.java 
b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/RspEngineMonitorFactory.java new file mode 100644 index 0000000..f997a0d --- /dev/null +++ b/src/divide-central/divide-monitor/src/main/java/be/ugent/idlab/divide/monitor/interfaces/rspengine/RspEngineMonitorFactory.java @@ -0,0 +1,9 @@ +package be.ugent.idlab.divide.monitor.interfaces.rspengine; + +public class RspEngineMonitorFactory { + + public static IRspEngineMonitor createRspEngineMonitor() { + return new RspEngineMonitor(); + } + +} diff --git a/src/divide-central/divide-query-derivation/pom.xml b/src/divide-central/divide-query-derivation/pom.xml new file mode 100644 index 0000000..6232c13 --- /dev/null +++ b/src/divide-central/divide-query-derivation/pom.xml @@ -0,0 +1,27 @@ + + + + divide + be.ugent.idlab + 1.0 + + 4.0.0 + + divide-query-derivation + + + + + be.ugent.idlab + divide-engine + 1.0 + + + be.ugent.idlab + divide-eye + 1.0 + + + \ No newline at end of file diff --git a/src/divide-central/divide-query-derivation/src/main/java/be/ugent/idlab/divide/queryderivation/DivideQueryDeriverFactory.java b/src/divide-central/divide-query-derivation/src/main/java/be/ugent/idlab/divide/queryderivation/DivideQueryDeriverFactory.java new file mode 100644 index 0000000..8666015 --- /dev/null +++ b/src/divide-central/divide-query-derivation/src/main/java/be/ugent/idlab/divide/queryderivation/DivideQueryDeriverFactory.java @@ -0,0 +1,45 @@ +package be.ugent.idlab.divide.queryderivation; + +import be.ugent.idlab.divide.core.engine.IDivideQueryDeriver; +import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; +import be.ugent.idlab.divide.queryderivation.eye.EyeDivideQueryDeriverFactory; + +public class DivideQueryDeriverFactory { + + /** + * Create an instance of an {@link IDivideQueryDeriver} based on the given + * {@link DivideQueryDeriverType}. 
+ * + * @param type {@link DivideQueryDeriverType} of the created {@link IDivideQueryDeriver}, + * i.e., method or reasoner used to perform the query derivation + * @param handleTBoxDefinitionsInContext boolean specifying whether the query deriver + * should allow to specify TBox definitions in the + * context updates sent for the query derivation; + * if true, this means that the query deriver should + * scan the context for new OWL-RL axioms and rules + * upon each query derivation call, heavily impacting + * the duration of the query derivation task + * @return a new instance of {@link IDivideQueryDeriver} that is of the given type, + * i.e., that used the corresponding method or reasoner to perform it query derivation + * @throws DivideQueryDeriverException when something goes wrong during the initialization of the + * newly created {@link IDivideQueryDeriver} + * @throws IllegalArgumentException if no valid {@link DivideQueryDeriverType} is given + * (i.e., when it is null) + */ + @SuppressWarnings("SwitchStatementWithTooFewBranches") + public static IDivideQueryDeriver createInstance(DivideQueryDeriverType type, + boolean handleTBoxDefinitionsInContext) + throws DivideQueryDeriverException { + if (type == null) { + throw new IllegalArgumentException("No valid query deriver type given"); + } + switch (type) { + case EYE: + return EyeDivideQueryDeriverFactory.createInstance(handleTBoxDefinitionsInContext); + + default: + throw new IllegalArgumentException("No valid query deriver type given"); + } + } + +} diff --git a/src/divide-central/divide-query-derivation/src/main/java/be/ugent/idlab/divide/queryderivation/DivideQueryDeriverType.java b/src/divide-central/divide-query-derivation/src/main/java/be/ugent/idlab/divide/queryderivation/DivideQueryDeriverType.java new file mode 100644 index 0000000..4a2f816 --- /dev/null +++ b/src/divide-central/divide-query-derivation/src/main/java/be/ugent/idlab/divide/queryderivation/DivideQueryDeriverType.java @@ -0,0 +1,7 @@ 
+package be.ugent.idlab.divide.queryderivation; + +public enum DivideQueryDeriverType { + + EYE + +} diff --git a/src/divide-central/divide-server/pom.xml b/src/divide-central/divide-server/pom.xml new file mode 100644 index 0000000..ccfb766 --- /dev/null +++ b/src/divide-central/divide-server/pom.xml @@ -0,0 +1,107 @@ + + + + divide + be.ugent.idlab + 1.0 + + 4.0.0 + + divide-server + + + UTF-8 + 1.8 + 1.8 + + + + + maven-restlet + Public online Restlet repository + https://maven.restlet.org + + + + + + + org.apache.commons + commons-configuration2 + 2.8.0 + + + + + + be.ugent.idlab + divide-engine + 1.0 + + + + be.ugent.idlab + divide-api + 1.0 + + + + be.ugent.idlab + divide-query-derivation + 1.0 + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + 2.1 + + + copy-dependencies + package + + copy-dependencies + + + ${project.build.directory}/lib + false + false + true + + + + + + + + maven-assembly-plugin + 2.6 + + + jar-with-dependencies + + + + be.ugent.idlab.divide.DivideServer + + + + + + make-assembly + package + + single + + + + + + + \ No newline at end of file diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/DivideServer.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/DivideServer.java new file mode 100644 index 0000000..72e1f81 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/DivideServer.java @@ -0,0 +1,415 @@ +package be.ugent.idlab.divide; + +import be.ugent.idlab.divide.api.DivideApiComponentFactory; +import be.ugent.idlab.divide.configuration.DivideConfig; +import be.ugent.idlab.divide.configuration.DivideQueryAsRspQlOrSparqlConfig; +import be.ugent.idlab.divide.configuration.DivideQueryConfig; +import be.ugent.idlab.divide.configuration.IDivideQueryConfig; +import be.ugent.idlab.divide.core.component.IComponent; +import be.ugent.idlab.divide.core.context.ContextEnrichment; +import 
be.ugent.idlab.divide.core.engine.DivideEngineFactory; +import be.ugent.idlab.divide.core.engine.IDivideEngine; +import be.ugent.idlab.divide.core.engine.IDivideQueryDeriver; +import be.ugent.idlab.divide.core.exception.DivideInvalidInputException; +import be.ugent.idlab.divide.core.exception.DivideNotInitializedException; +import be.ugent.idlab.divide.core.exception.DivideQueryDeriverException; +import be.ugent.idlab.divide.core.query.IDivideQuery; +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserInput; +import be.ugent.idlab.divide.core.query.parser.DivideQueryParserOutput; +import be.ugent.idlab.divide.core.query.parser.InputQueryLanguage; +import be.ugent.idlab.divide.core.query.parser.InvalidDivideQueryParserInputException; +import be.ugent.idlab.divide.queryderivation.DivideQueryDeriverFactory; +import be.ugent.idlab.divide.queryderivation.DivideQueryDeriverType; +import be.ugent.idlab.divide.util.LogConstants; +import be.ugent.idlab.divide.util.component.ComponentEntry; +import be.ugent.idlab.divide.util.component.ComponentEntryParserException; +import be.ugent.idlab.divide.util.component.CsvComponentEntryParser; +import be.ugent.idlab.kb.IKnowledgeBase; +import be.ugent.idlab.kb.api.KbApiComponentFactory; +import be.ugent.idlab.kb.jena3.KnowledgeBaseFactory; +import be.ugent.idlab.kb.jena3.KnowledgeBaseType; +import be.ugent.idlab.util.io.IOUtilities; +import be.ugent.idlab.util.rdf.jena3.owlapi4.JenaUtilities; +import org.apache.commons.configuration2.ex.ConfigurationException; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.FileNotFoundException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class DivideServer { + + private static final Logger LOGGER = LoggerFactory.getLogger(DivideServer.class.getName()); + + /** + * Entry point of application: creates a DIVIDE server & starts it. 
+ * + * @param args command line arguments + */ + public static void main(String[] args) { + try { + if (args.length == 1 || args.length == 2) { + new DivideServer().start(args); + } else { + System.out.println("Usage: DivideServer []"); + } + } catch (Exception e) { + LOGGER.error("Error during DIVIDE server lifetime", e); + } + } + + private void start(String[] filePaths) throws Exception { + // initialize Jena properly + org.apache.jena.query.ARQ.init(); + + // initialize logging + System.setProperty("org.restlet.engine.loggerFacadeClass", + "org.restlet.ext.slf4j.Slf4jLoggerFacade"); + + // initialize configuration properties + DivideConfig config; + try { + config = DivideConfig.getInstance(filePaths[0]); + } catch (ConfigurationException | FileNotFoundException e) { + LOGGER.error("Error while reading the configuration file {}", filePaths[0], e); + System.out.println("Specified configuration file does not exist or is not valid"); + return; + } + + // initialize knowledge base + String baseIri = config.getBaseIriOfKnowledgeBase(); + KnowledgeBaseType knowledgeBaseType = config.getKnowledgeBaseType(); + IKnowledgeBase knowledgeBase = KnowledgeBaseFactory.getKnowledgeBase( + knowledgeBaseType, baseIri); + + // create a DIVIDE query deriver that uses the EYE reasoner + IDivideQueryDeriver divideQueryDeriver = DivideQueryDeriverFactory. 
+ createInstance(DivideQueryDeriverType.EYE, + config.shouldHandleTBoxDefinitionsInContext()); + + // load DIVIDE ontology files + LOGGER.info("Loading ontology..."); + Model divideOntologyModel = ModelFactory.createDefaultModel(); + for (String ontologyFile : config.getDivideOntologyFilePaths()) { + LOGGER.info("-> ontology file: {}", ontologyFile); + String fileContent = IOUtilities.readFileIntoString(ontologyFile); + if (!fileContent.trim().isEmpty()) { + Model model = JenaUtilities.parseString(fileContent); + if (model != null) { + divideOntologyModel.add(model); + } else { + throw new IllegalArgumentException( + String.format("Ontology file %s contains invalid RDF", ontologyFile)); + } + } + } + + // create and initialize DIVIDE engine + IDivideEngine divideEngine = DivideEngineFactory.createInstance(); + divideEngine.initialize( + divideQueryDeriver, + knowledgeBase, + divideOntologyModel, + config.shouldStopRspEngineStreamsOnContextChanges(), + config.shouldProcessUnmappedVariableMatchesInParser(), + config.shouldValidateUnboundVariablesInRspQlQueryBodyInParser()); + + // initialize list of DIVIDE queries in configuration + // (wrongly configured DIVIDE queries lead to an IllegalArgumentException) + LOGGER.debug(LogConstants.METRIC_MARKER, "INIT_QUERIES_START"); + initializeDivideQueries(divideEngine, config.getDivideQueryPropertiesFiles()); + initializeDivideQueriesAsRspQlOrSparql( + divideEngine, config.getDivideQueryAsSparqlPropertiesFiles(), + InputQueryLanguage.SPARQL); + initializeDivideQueriesAsRspQlOrSparql( + divideEngine, config.getDivideQueryAsRspQlPropertiesFiles(), + InputQueryLanguage.RSP_QL); + LOGGER.debug(LogConstants.METRIC_MARKER, "INIT_QUERIES_END"); + + // initialize list of components in configuration (if specified) + // (wrongly configured components lead to an IllegalArgumentException) + if (filePaths.length > 1) { + LOGGER.debug(LogConstants.METRIC_MARKER, "INIT_COMPONENTS_START"); + initializeComponents(divideEngine, filePaths[1]); 
+ LOGGER.debug(LogConstants.METRIC_MARKER, "INIT_COMPONENTS_END"); + } + + // create and start DIVIDE API + DivideApiComponentFactory.createRestApiComponent( + divideEngine, config.getHost(), config.getDivideServerPort(), "/divide").start(); + LOGGER.info("Started DIVIDE server API at http://{}:{}/divide", + config.getHost(), config.getDivideServerPort()); + + // create and start Knowledge Base API + KbApiComponentFactory.createRestApiComponent( + knowledgeBase, config.getHost(), config.getKnowledgeBaseServerPort(), "/kb").start(); + LOGGER.info("Started Knowledge Base server API at http://{}:{}/kb", + config.getHost(), config.getKnowledgeBaseServerPort()); + } + + private void initializeDivideQueries(IDivideEngine divideEngine, + List divideQueryPropertiesFiles) { + // loop over all specified properties files of a DIVIDE query + for (String queryPropertiesFile : divideQueryPropertiesFiles) { + String queryName = ""; + try { + // read DIVIDE query properties file into config object + DivideQueryConfig divideQueryConfig = + DivideQueryConfig.getInstance(queryPropertiesFile); + + // retrieve query name + queryName = divideQueryConfig.getQueryName(); + + // retrieve query pattern + String queryPattern = IOUtilities.removeWhiteSpace( + IOUtilities.readFileIntoString( + divideQueryConfig.getQueryPatternFilePath())); + if (queryPattern == null || queryPattern.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Query pattern file invalid or not specified for query '%s'", queryName)); + } + + // retrieve sensor query rule + String sensorQueryRule = IOUtilities.removeWhiteSpace( + IOUtilities.readFileIntoString( + divideQueryConfig.getSensorQueryRuleFilePath())); + if (sensorQueryRule == null || sensorQueryRule.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Sensor query rule file invalid or not specified for query '%s'", queryName)); + } + + // retrieve goal + String goal = IOUtilities.removeWhiteSpace( + 
IOUtilities.readFileIntoString(divideQueryConfig.getGoalFilePath())); + if (goal == null || goal.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Goal file invalid or not specified for query '%s'", queryName)); + } + + // retrieve context enrichment + ContextEnrichment contextEnrichment = initializeContextEnrichment( + divideQueryConfig, queryName); + + // create and add DIVIDE query to the DIVIDE engine + IDivideQuery divideQuery = divideEngine.addDivideQuery( + queryName, queryPattern, sensorQueryRule, goal, contextEnrichment); + if (divideQuery == null) { + throw new IllegalArgumentException(String.format( + "DIVIDE query with name '%s' already exists", queryName)); + } + + } catch (ConfigurationException | FileNotFoundException e) { + LOGGER.error("Error while reading the DIVIDE query properties file '{}'", + queryPropertiesFile, e); + throw new IllegalArgumentException(e); + + } catch (IllegalArgumentException e) { + LOGGER.error("Error in configuration of DIVIDE query '{}'", queryName, e); + throw e; + + } catch (DivideNotInitializedException e) { + LOGGER.error("DIVIDE engine is not properly initialized", e); + throw new IllegalStateException(e); + + } catch (DivideInvalidInputException e) { + LOGGER.error("Error when registering new DIVIDE query because input is invalid", e); + throw new RuntimeException(e); + + } catch (DivideQueryDeriverException e) { + LOGGER.error("Error when registering new query at query deriver", e); + throw new RuntimeException(e); + } + } + } + + private void initializeDivideQueriesAsRspQlOrSparql(IDivideEngine divideEngine, + List divideQueryPropertiesFiles, + InputQueryLanguage inputQueryLanguage) { + // loop over all specified properties files of a DIVIDE query + for (String queryPropertiesFile : divideQueryPropertiesFiles) { + String queryName = ""; + try { + LOGGER.info("Trying to add new DIVIDE query from properties file {} ({} input) ", + queryPropertiesFile, inputQueryLanguage.toString()); + + // 
read DIVIDE query properties file into config object + DivideQueryAsRspQlOrSparqlConfig divideQueryConfig = + DivideQueryAsRspQlOrSparqlConfig.getInstance(queryPropertiesFile); + + // retrieve query name + queryName = divideQueryConfig.getQueryName(); + + // retrieve stream query + String streamQuery = null; + if (!divideQueryConfig.getStreamQueryFilePath().isEmpty()) { + streamQuery = IOUtilities.removeWhiteSpace( + IOUtilities.readFileIntoString( + divideQueryConfig.getStreamQueryFilePath())); + if (streamQuery == null || streamQuery.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Stream query file invalid or not specified for query '%s'", queryName)); + } + } + + // retrieve intermediate queries + List intermediateQueries = new ArrayList<>(); + for (String intermediateQueryFilePath : + divideQueryConfig.getIntermediateQueryFilePaths()) { + String intermediateQuery = IOUtilities.removeWhiteSpace( + IOUtilities.readFileIntoString(intermediateQueryFilePath)); + if (intermediateQuery == null || intermediateQuery.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Intermediate query file(s) specified for query '%s'" + + "are non-existent, invalid or empty ", queryName)); + } + intermediateQueries.add(intermediateQuery); + } + + // retrieve final query + String finalQuery = null; + if (!divideQueryConfig.getFinalQueryFilePath().isEmpty()) { + finalQuery = IOUtilities.removeWhiteSpace( + IOUtilities.readFileIntoString( + divideQueryConfig.getFinalQueryFilePath())); + if (finalQuery == null || finalQuery.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Final query file invalid or not specified for query '%s'", queryName)); + } + } + + // retrieve variable mapping of stream to final query + Map streamToFinalQueryVariableMapping; + try { + streamToFinalQueryVariableMapping = + divideQueryConfig.getStreamToFinalQueryVariableMapping(); + } catch (ConfigurationException e) { + throw new 
IllegalArgumentException( + String.format("Stream to final query variable mapping " + + "invalid for query '%s'", queryName), e); + } + + // parse DIVIDE query input + LOGGER.debug(LogConstants.METRIC_MARKER, "QUERY_PARSING_START"); + DivideQueryParserInput divideQueryParserInput = new DivideQueryParserInput( + inputQueryLanguage, + divideQueryConfig.getStreamWindows(), + streamQuery, + intermediateQueries, + finalQuery, + divideQueryConfig.getSolutionModifier(), + streamToFinalQueryVariableMapping); + DivideQueryParserOutput divideQueryParserOutput = + divideEngine.getQueryParser(). + parseDivideQuery(divideQueryParserInput); + LOGGER.debug(LogConstants.METRIC_MARKER, "QUERY_PARSING_END"); + + // retrieve context enrichment + ContextEnrichment contextEnrichment = initializeContextEnrichment( + divideQueryConfig, queryName); + + // create and add DIVIDE query to the DIVIDE engine + IDivideQuery divideQuery = divideEngine.addDivideQuery( + queryName, + divideQueryParserOutput.getQueryPattern(), + divideQueryParserOutput.getSensorQueryRule(), + divideQueryParserOutput.getGoal(), + contextEnrichment); + if (divideQuery == null) { + throw new IllegalArgumentException(String.format( + "DIVIDE query with name '%s' already exists", queryName)); + } + + LOGGER.info("Successfully added new DIVIDE query '{}'", queryName); + + } catch (ConfigurationException | FileNotFoundException e) { + LOGGER.error("Error while reading the DIVIDE query properties file '{}'", + queryPropertiesFile, e); + throw new IllegalArgumentException(e); + + } catch (IllegalArgumentException e) { + LOGGER.error("Error in configuration of DIVIDE query '{}'", queryName, e); + throw e; + + } catch (DivideNotInitializedException e) { + LOGGER.error("DIVIDE engine is not properly initialized", e); + throw new IllegalStateException(e); + + } catch (InvalidDivideQueryParserInputException e) { + LOGGER.error("Error when parsing DIVIDE query input", e); + throw new RuntimeException(e); + + } catch 
(DivideInvalidInputException e) { + LOGGER.error("Error when registering new DIVIDE query because input is invalid", e); + throw new RuntimeException(e); + + } catch (DivideQueryDeriverException e) { + LOGGER.error("Error when registering new query at query deriver", e); + throw new RuntimeException(e); + } + } + } + + private void initializeComponents(IDivideEngine divideEngine, + String componentsFile) { + try { + // parse component entries specified in file + List componentEntries = + CsvComponentEntryParser.parseComponentEntryFile(componentsFile); + + // register all components to the DIVIDE engine + for (ComponentEntry componentEntry : componentEntries) { + IComponent component = divideEngine.registerComponent( + new ArrayList<>(componentEntry.getContextIris()), + componentEntry.getRspQueryLanguage(), + componentEntry.getRspEngineUrl()); + if (component == null) { + throw new IllegalArgumentException( + "Components file contains invalid or duplicate entries"); + } + } + + } catch (ComponentEntryParserException | DivideInvalidInputException e) { + String message = "Specified components file contains invalid inputs"; + LOGGER.error(message, e); + throw new IllegalArgumentException(message, e); + + } catch (DivideNotInitializedException e) { + // can be ignored - will never occur since server first explicitly + // initializes the DIVIDE engine + } + } + + private ContextEnrichment initializeContextEnrichment(IDivideQueryConfig config, + String queryName) { + // retrieve list of queries + List queries = new ArrayList<>(); + for (String queryFilePath : + config.getContextEnrichmentQueryFilePaths()) { + String query = IOUtilities.removeWhiteSpace( + IOUtilities.readFileIntoString(queryFilePath)); + if (query == null || query.trim().isEmpty()) { + throw new IllegalArgumentException(String.format( + "Context-enriching query file(s) specified for query '%s'" + + "are non-existent, invalid or empty ", queryName)); + } + queries.add(query); + } + + // return basic 
context enrichment if no queries are present + if (queries.isEmpty()) { + return new ContextEnrichment(); + } else { + // otherwise, retrieve context enrichment settings + boolean doReasoning = config.getContextEnrichmentDoReasoning(); + boolean executeOnOntologyTriples = config.getContextEnrichmentExecuteOnOntologyTriples(); + + // -> and create context enrichment + return new ContextEnrichment(doReasoning, executeOnOntologyTriples, queries); + } + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideConfig.java new file mode 100644 index 0000000..43fe907 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideConfig.java @@ -0,0 +1,302 @@ +package be.ugent.idlab.divide.configuration; + +import be.ugent.idlab.divide.configuration.legacy.DivideQueryAsRspQlOrSparqlConfig; +import be.ugent.idlab.divide.configuration.legacy.DivideQueryConfig; +import be.ugent.idlab.divide.configuration.util.CustomJsonConfiguration; +import be.ugent.idlab.kb.jena3.KnowledgeBaseType; +import be.ugent.idlab.util.io.IOUtilities; +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import org.apache.commons.configuration2.HierarchicalConfiguration; +import org.apache.commons.configuration2.JSONConfiguration; +import org.apache.commons.configuration2.ex.ConfigurationException; +import org.apache.commons.configuration2.tree.ImmutableNode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Configuration of the DIVIDE server (including engine, DIVIDE API + * and 
Knowledge Base API configuration parameters). + */ +public class DivideConfig { + + private static final Logger LOGGER = LoggerFactory.getLogger(DivideConfig.class); + + private static final String SERVER_HOST = "server.host"; + private static final String SERVER_PORT_DIVIDE = "server.port.divide"; + private static final String SERVER_PORT_KB = "server.port.kb"; + + private static final String DIVIDE_KB_TYPE = "divide.kb.type"; + private static final String DIVIDE_KB_BASE_IRI = "divide.kb.baseIri"; + + private static final String DIVIDE_ENGINE_STOP_RSP_ENGINE_STREAMS_ON_CONTEXT_CHANGES = + "divide.engine.stopRspEngineStreamsOnContextChanges"; + private static final String DIVIDE_ENGINE_PARSER_PROCESS_UNMAPPED_VARIABLE_MATCHES = + "divide.engine.parser.processUnmappedVariableMatches"; + private static final String DIVIDE_ENGINE_PARSER_VALIDATE_UNBOUND_VARIABLES_IN_RSP_QL_QUERY_BODY = + "divide.engine.parser.validateUnboundVariablesInRspQlQueryBody"; + + private static final String DIVIDE_REASONER_HANDLE_TBOX_DEFINITIONS_IN_CONTEXT = + "divide.reasoner.handleTboxDefinitionsInContext"; + + private static final String DIVIDE_ONTOLOGY_DIRECTORY = "divide.ontology.dir"; + private static final String DIVIDE_ONTOLOGY_FILES = "divide.ontology.files"; + + private static final String DIVIDE_QUERIES_DIVIDE = "divide.queries.divide"; + private static final String DIVIDE_QUERIES_SPARQL = "divide.queries.sparql"; + private static final String DIVIDE_QUERIES_RSP_QL = "divide.queries.rspql"; + + private final JSONConfiguration config; + private final String configFileDirectory; + + private DivideConfig(String propertiesFilePath) throws ConfigurationException, FileNotFoundException { + config = new CustomJsonConfiguration(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + /** + * Creates a DIVIDE config object based on the given properties file. 
+ * + * @param propertiesFile path to properties file + * @return an instantiated DIVIDE config object which can be used to retrieve + * the configuration parameters of the DIVIDE server + * @throws ConfigurationException if the properties file is invalid + * @throws FileNotFoundException if the properties file does not exist + * + */ + public static DivideConfig getInstance(String propertiesFile) + throws ConfigurationException, FileNotFoundException { + return new DivideConfig(propertiesFile); + } + + public static void main(String[] args) throws Exception { + DivideConfig c = getInstance("divide.properties.json"); + + Iterator i = c.config.getKeys(); + while (i.hasNext()) { + String next = i.next(); + //System.out.println(next); + } + System.out.println("---"); + + + System.out.println(c.config.getProperties("divide")); + + for (HierarchicalConfiguration stream_graph_names : c.config.configurationsAt("stream_graph_names")) { + System.out.println(stream_graph_names.getString("name")); + } + System.out.println(c.config.configurationsAt("stream_graph_names")); + + System.out.println(c.config.getString("stream_graph_names.$0.name")); + + i = c.config.getKeys("server."); + while (i.hasNext()) { + String next = i.next(); + //System.out.println(next); + } + + String json = " { \"server\": {\n" + + " \"host\": \"localhost\",\n" + + " \"port\":{\n" + + " \"divide\": 8342,\n" + + " \"kb\": 8343\n" + + " }\n" + + " }}"; + JsonObject x = new Gson().fromJson(json, JsonObject.class); + //System.out.println(x.get("server")); + } + + /** + * @return host on which the DIVIDE API & Knowledge Base API should be exposed + * (default: 'localhost') + */ + public String getHost() { + return config.getString(SERVER_HOST, "localhost"); + } + + /** + * @return port on which the DIVIDE API should be exposed (default: 5000) + */ + public int getDivideServerPort() { + return config.getInt(SERVER_PORT_DIVIDE, 5000); + } + + /** + * @return port on which the Knowledge Base API should be 
exposed + * (default: 5001) + */ + public int getKnowledgeBaseServerPort() { + return config.getInt(SERVER_PORT_KB, 5001); + } + + /** + * @return {@link KnowledgeBaseType} representing the type of knowledge base + * that should be instantiated for the DIVIDE engine + * (default: {@link KnowledgeBaseType#JENA}) + */ + public KnowledgeBaseType getKnowledgeBaseType() { + KnowledgeBaseType defaultType = KnowledgeBaseType.JENA; + KnowledgeBaseType configType = + KnowledgeBaseType.fromString(config.getString(DIVIDE_KB_TYPE)); + return configType != null ? configType : defaultType; + } + + /** + * @return base IRI of the knowledge base that should be used for resolving + * IRIs used within the context of the started DIVIDE engine + * (default: 'http://idlab.ugent.be/divide') + */ + public String getBaseIriOfKnowledgeBase() { + return config.getString(DIVIDE_KB_BASE_IRI, "http://idlab.ugent.be/divide"); + } + + /** + * @return whether DIVIDE should pause RSP engine streams on a component + * when context changes are detected that trigger the DIVIDE query + * derivation for that component (default: true) + */ + public boolean shouldStopRspEngineStreamsOnContextChanges() { + return config.getBoolean(DIVIDE_ENGINE_STOP_RSP_ENGINE_STREAMS_ON_CONTEXT_CHANGES, true); + } + + /** + * @return whether the DIVIDE query parser should process unmapped variable matches in the + * query input (i.e., identical variable names occurring in both the stream and + * final query, that are not explicitly defined in the variable mapping file of + * the input of that query) - if true, matching variable names are considered as + * variable mappings; if false, matching variable names are considered coincidence + * and are not treated as mappings (default: false) + */ + public boolean shouldProcessUnmappedVariableMatchesInParser() { + return config.getBoolean(DIVIDE_ENGINE_PARSER_PROCESS_UNMAPPED_VARIABLE_MATCHES, false); + } + + /** + * @return whether the DIVIDE query parser should validate 
variables in the RSP-QL query + * body generated by the DIVIDE query parser, should be validated (= checked for occurrence + * in the WHERE clause of the query or in the set of input variables that will be substituted + * during the DIVIDE query derivation) during parsing (default: true) + */ + public boolean shouldValidateUnboundVariablesInRspQlQueryBodyInParser() { + return config.getBoolean( + DIVIDE_ENGINE_PARSER_VALIDATE_UNBOUND_VARIABLES_IN_RSP_QL_QUERY_BODY, true); + } + + /** + * @return whether DIVIDE should allow to specify TBox definitions in the + * context updates sent for the query derivation; if true, this means + * that DIVIDE should scan all context updates for new OWL-RL axioms + * and rules upon each query derivation call, heavily impacting the + * derivation of queries upon context updates (default: false) + */ + public boolean shouldHandleTBoxDefinitionsInContext() { + return config.getBoolean(DIVIDE_REASONER_HANDLE_TBOX_DEFINITIONS_IN_CONTEXT, false); + } + + /** + * @return list of canonical path names of files containing the ontology (TBox) data + * used by this DIVIDE engine (default: empty list) + */ + public List getDivideOntologyFilePaths() { + String ontologyDirectoryPath = config.getString(DIVIDE_ONTOLOGY_DIRECTORY, "."); + if (ontologyDirectoryPath != null && !Paths.get(ontologyDirectoryPath).isAbsolute()) { + ontologyDirectoryPath = Paths.get(configFileDirectory, ontologyDirectoryPath).toString(); + } + + String[] ontologyFileNames = config.getStringArray(DIVIDE_ONTOLOGY_FILES); + + List ontologyFilePaths = new ArrayList<>(); + for (String ontologyFileName : ontologyFileNames) { + try { + File ontologyFile = new File(ontologyFileName); + if (!ontologyFile.isAbsolute()) { + ontologyFile = new File(ontologyDirectoryPath, ontologyFileName); + } + + String canonicalPath = ontologyFile.getCanonicalPath(); + if (IOUtilities.isValidFile(canonicalPath)) { + ontologyFilePaths.add(canonicalPath); + } else { + throw new 
IOException(String.format("%s is not a valid file", canonicalPath)); + } + } catch (IOException e) { + LOGGER.error("Error with finding ontology file {}", ontologyFileName, e); + throw new RuntimeException(e); + } + } + + return ontologyFilePaths; + } + + /** + * @return list of path names of files that each specify the properties of a + * single DIVIDE query that should be loaded into the DIVIDE engine; + * these properties can be read with the {@link DivideQueryConfig} class + * (default: empty list) + */ + public List getDivideQueryPropertiesFiles() { + String[] queries = config.getStringArray(DIVIDE_QUERIES_DIVIDE); + if (queries == null) { + return new ArrayList<>(); + } else { + return Arrays.stream(queries) + .filter(Objects::nonNull) + .map(path -> !Paths.get(path).isAbsolute() ? + Paths.get(configFileDirectory, path).toString() : path) + .collect(Collectors.toList()); + } + } + + /** + * @return list of path names of files that each specify the properties of a + * single DIVIDE query that should be loaded into the DIVIDE engine; + * the specification is based on a series of 1 or more chained SPARQL queries; + * these properties can be read with the {@link DivideQueryAsRspQlOrSparqlConfig} + * class (default: empty list) + */ + public List getDivideQueryAsSparqlPropertiesFiles() { + String[] queries = config.getStringArray(DIVIDE_QUERIES_SPARQL); + if (queries == null) { + return new ArrayList<>(); + } else { + return Arrays.stream(queries) + .filter(Objects::nonNull) + .map(path -> !Paths.get(path).isAbsolute() ? 
+ Paths.get(configFileDirectory, path).toString() : path) + .collect(Collectors.toList()); + } + } + + /** + * @return list of path names of files that each specify the properties of a + * single DIVIDE query that should be loaded into the DIVIDE engine; + * the specification is based on a single RSP-QL query; + * these properties can be read with the {@link DivideQueryAsRspQlOrSparqlConfig} + * class (default: empty list) + */ + public List getDivideQueryAsRspQlPropertiesFiles() { + String[] queries = config.getStringArray(DIVIDE_QUERIES_RSP_QL); + if (queries == null) { + return new ArrayList<>(); + } else { + return Arrays.stream(queries) + .filter(Objects::nonNull) + .map(path -> !Paths.get(path).isAbsolute() ? + Paths.get(configFileDirectory, path).toString() : path) + .collect(Collectors.toList()); + } + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideQueryAsRspQlOrSparqlConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideQueryAsRspQlOrSparqlConfig.java new file mode 100644 index 0000000..c737730 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideQueryAsRspQlOrSparqlConfig.java @@ -0,0 +1,196 @@ +package be.ugent.idlab.divide.configuration; + +import be.ugent.idlab.divide.configuration.util.CustomJsonConfiguration; +import be.ugent.idlab.divide.core.query.parser.StreamWindow; +import org.apache.commons.configuration2.Configuration; +import org.apache.commons.configuration2.HierarchicalConfiguration; +import org.apache.commons.configuration2.JSONConfiguration; +import org.apache.commons.configuration2.ex.ConfigurationException; +import org.apache.commons.configuration2.ex.ConfigurationRuntimeException; +import org.apache.commons.configuration2.tree.ImmutableNode; +import org.apache.commons.io.FilenameUtils; + +import java.io.File; +import java.io.FileNotFoundException; +import 
java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * Configuration of a single DIVIDE query that should be added to the DIVIDE + * engine upon start-up. + */ +public class DivideQueryAsRspQlOrSparqlConfig implements IDivideQueryConfig { + + private static final String STREAM_WINDOWS = "streamWindows"; + private static final String STREAM_WINDOW_STREAM_IRI = "streamIri"; + private static final String STREAM_WINDOW_WINDOW_DEFINITION = "windowDefinition"; + private static final String STREAM_WINDOW_DEFAULT_WINDOW_PARAMETER_VALUES = "defaultWindowParameterValues"; + + private static final String STREAM_QUERY = "streamQuery"; + private static final String INTERMEDIATE_QUERIES = "intermediateQueries"; + private static final String FINAL_QUERY = "finalQuery"; + private static final String SOLUTION_MODIFIER = "solutionModifier"; + private static final String STREAM_TO_FINAL_QUERY_VARIABLE_MAPPING = "streamToFinalQueryVariableMapping"; + + private static final String CONTEXT_ENRICHMENT_DO_REASONING = "contextEnrichment.doReasoning"; + private static final String CONTEXT_ENRICHMENT_EXECUTE_ON_ONTOLOGY_TRIPLES = + "contextEnrichment.executeOnOntologyTriples"; + private static final String CONTEXT_ENRICHMENT_QUERIES = "contextEnrichment.queries"; + + protected final JSONConfiguration config; + private final String queryName; + private final String configFileDirectory; + + private DivideQueryAsRspQlOrSparqlConfig(String propertiesFilePath) + throws ConfigurationException, FileNotFoundException { + config = new CustomJsonConfiguration(propertiesFilePath); + queryName = FilenameUtils.getBaseName(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + /** + * Creates a DIVIDE query config object based on the given properties file. 
+ * + * @param propertiesFile path to query properties file + * @return an instantiated DIVIDE query config object which can be used to + * retrieve the configuration parameters of the DIVIDE query + * @throws ConfigurationException if the properties file is invalid + * @throws FileNotFoundException if the properties file does not exist + */ + public static DivideQueryAsRspQlOrSparqlConfig getInstance(String propertiesFile) + throws ConfigurationException, FileNotFoundException { + return new DivideQueryAsRspQlOrSparqlConfig(propertiesFile); + } + + /** + * @return the name of the DIVIDE query, + * which equals based on the name of the properties file + */ + public String getQueryName() { + return queryName; + } + + public List getStreamWindows() throws ConfigurationException { + List result = new ArrayList<>(); + for (HierarchicalConfiguration streamWindowConfig : + config.configurationsAt(STREAM_WINDOWS)) { + String streamIri = streamWindowConfig.getString(STREAM_WINDOW_STREAM_IRI); + if (streamIri != null) { + streamIri = "<" + streamIri +">"; + } + + String windowDefinition = streamWindowConfig.getString(STREAM_WINDOW_WINDOW_DEFINITION); + + Map defaultWindowParameters = new HashMap<>(); + try { + Configuration variableMappingConfig = + streamWindowConfig.configurationAt(STREAM_WINDOW_DEFAULT_WINDOW_PARAMETER_VALUES); + Iterator it = variableMappingConfig.getKeys(); + while (it.hasNext()) { + String key = it.next(); + String value = variableMappingConfig.getString(key, null); + if (value == null) { + throw new ConfigurationException( + "Default window parameter mapping file can only contain string values"); + } + defaultWindowParameters.put(key, value); + } + } catch (ConfigurationRuntimeException e) { + defaultWindowParameters = new HashMap<>(); + } + + if (streamIri == null) { + // invalid entry, so null returned + return null; + } + + result.add(new StreamWindow(streamIri, windowDefinition, defaultWindowParameters)); + } + return result; + } + + public 
String getStreamQueryFilePath() { + String path = config.getString(STREAM_QUERY, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + public List getIntermediateQueryFilePaths() { + String[] queries = config.getStringArray(INTERMEDIATE_QUERIES); + List queryList = queries == null ? new ArrayList<>() : Arrays.asList(queries); + List result = new ArrayList<>(); + for (String path : queryList) { + if (path != null && !path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + result.add(path); + } + return result; + } + + public String getFinalQueryFilePath() { + String path = config.getString(FINAL_QUERY, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + public String getSolutionModifier() { + return config.getString(SOLUTION_MODIFIER, null); + } + + public Map getStreamToFinalQueryVariableMapping() throws ConfigurationException { + Map mapping = new HashMap<>(); + try { + Configuration variableMappingConfig = + config.configurationAt(STREAM_TO_FINAL_QUERY_VARIABLE_MAPPING); + Iterator it = variableMappingConfig.getKeys(); + while (it.hasNext()) { + String key = it.next(); + String value = variableMappingConfig.getString(key, null); + if (value == null) { + throw new ConfigurationException( + "Variable mapping file can only contain string values"); + } + mapping.put(key, value); + } + return mapping; + } catch (ConfigurationRuntimeException e) { + return new HashMap<>(); + } + } + + @Override + public boolean getContextEnrichmentDoReasoning() { + return config.getBoolean(CONTEXT_ENRICHMENT_DO_REASONING, true); + } + + @Override + public boolean getContextEnrichmentExecuteOnOntologyTriples() { + return config.getBoolean(CONTEXT_ENRICHMENT_EXECUTE_ON_ONTOLOGY_TRIPLES, true); + } + + @Override + public List 
getContextEnrichmentQueryFilePaths() { + String[] queries = config.getStringArray(CONTEXT_ENRICHMENT_QUERIES); + List queryList = queries == null ? new ArrayList<>() : Arrays.asList(queries); + List result = new ArrayList<>(); + for (String path : queryList) { + if (path != null && !path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + result.add(path); + } + return result; + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideQueryConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideQueryConfig.java new file mode 100644 index 0000000..60ed966 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/DivideQueryConfig.java @@ -0,0 +1,124 @@ +package be.ugent.idlab.divide.configuration; + +import be.ugent.idlab.divide.configuration.util.CustomJsonConfiguration; +import org.apache.commons.configuration2.JSONConfiguration; +import org.apache.commons.configuration2.ex.ConfigurationException; +import org.apache.commons.io.FilenameUtils; + +import java.io.File; +import java.io.FileNotFoundException; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Configuration of a single DIVIDE query that should be added to the DIVIDE + * engine upon start-up. 
+ */ +public class DivideQueryConfig implements IDivideQueryConfig { + + private static final String QUERY_PATTERN = "queryPattern"; + private static final String SENSOR_QUERY_RULE = "sensorQueryRule"; + private static final String GOAL = "goal"; + + private static final String CONTEXT_ENRICHMENT_DO_REASONING = "contextEnrichment.doReasoning"; + private static final String CONTEXT_ENRICHMENT_EXECUTE_ON_ONTOLOGY_TRIPLES = + "contextEnrichment.executeOnOntologyTriples"; + private static final String CONTEXT_ENRICHMENT_QUERIES = "contextEnrichment.queries"; + + private final JSONConfiguration config; + private final String queryName; + private final String configFileDirectory; + + private DivideQueryConfig(String propertiesFilePath) + throws ConfigurationException, FileNotFoundException { + config = new CustomJsonConfiguration(propertiesFilePath); + queryName = FilenameUtils.getBaseName(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + /** + * Creates a DIVIDE query config object based on the given properties file. 
+ * + * @param propertiesFile path to query properties file + * @return an instantiated DIVIDE query config object which can be used to + * retrieve the configuration parameters of the DIVIDE query + * @throws ConfigurationException if the properties file is invalid + * @throws FileNotFoundException if the properties file does not exist + */ + public static DivideQueryConfig getInstance(String propertiesFile) + throws ConfigurationException, FileNotFoundException { + return new DivideQueryConfig(propertiesFile); + } + + /** + * @return the name of the DIVIDE query, + * which equals based on the name of the properties file + */ + public String getQueryName() { + return queryName; + } + + /** + * @return path to query pattern file specified in DIVIDE query config file, + * or null if not specified + */ + public String getQueryPatternFilePath() { + String path = config.getString(QUERY_PATTERN, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + /** + * @return path to sensor query rule file specified in DIVIDE query config file, + * or null if not specified + */ + public String getSensorQueryRuleFilePath() { + String path = config.getString(SENSOR_QUERY_RULE, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + /** + * @return path to goal file specified in DIVIDE query config file, + * or null if not specified + */ + public String getGoalFilePath() { + String path = config.getString(GOAL, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + @Override + public boolean getContextEnrichmentDoReasoning() { + return config.getBoolean(CONTEXT_ENRICHMENT_DO_REASONING, true); + } + + @Override + public boolean getContextEnrichmentExecuteOnOntologyTriples() { + return 
config.getBoolean(CONTEXT_ENRICHMENT_EXECUTE_ON_ONTOLOGY_TRIPLES, true); + } + + @Override + public List getContextEnrichmentQueryFilePaths() { + String[] queries = config.getStringArray(CONTEXT_ENRICHMENT_QUERIES); + List queryList = queries == null ? new ArrayList<>() : Arrays.asList(queries); + List result = new ArrayList<>(); + for (String path : queryList) { + if (path != null && !path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + result.add(path); + } + return result; + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/IDivideQueryConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/IDivideQueryConfig.java new file mode 100644 index 0000000..319aeca --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/IDivideQueryConfig.java @@ -0,0 +1,13 @@ +package be.ugent.idlab.divide.configuration; + +import java.util.List; + +public interface IDivideQueryConfig { + + boolean getContextEnrichmentDoReasoning(); + + boolean getContextEnrichmentExecuteOnOntologyTriples(); + + List getContextEnrichmentQueryFilePaths(); + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideConfig.java new file mode 100644 index 0000000..f0c68b8 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideConfig.java @@ -0,0 +1,256 @@ +package be.ugent.idlab.divide.configuration.legacy; + +import be.ugent.idlab.kb.jena3.KnowledgeBaseType; +import be.ugent.idlab.util.io.IOUtilities; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +/** + * Configuration of the DIVIDE server (including engine, DIVIDE API + * and Knowledge Base API configuration parameters). + */ +@SuppressWarnings("unused") +public class DivideConfig { + + private static final Logger LOGGER = LoggerFactory.getLogger(DivideConfig.class); + + private static final String DIVIDE_SERVER_HOST = "server.host"; + private static final String DIVIDE_SERVER_PORT_DIVIDE = "server.port.divide"; + private static final String DIVIDE_SERVER_PORT_KB = "server.port.kb"; + + private static final String DIVIDE_KB_TYPE = "divide.kb.type"; + private static final String DIVIDE_KB_BASE_IRI = "divide.kb.base_iri"; + + private static final String DIVIDE_ENGINE_STOP_RSP_ENGINE_STREAMS_ON_CONTEXT_CHANGES = + "divide.engine.stop_rsp_engine_streams_on_context_changes"; + private static final String DIVIDE_ENGINE_PARSER_PROCESS_UNMAPPED_VARIABLE_MATCHES = + "divide.engine.parser.process_unmapped_variable_matches"; + private static final String DIVIDE_ENGINE_PARSER_VALIDATE_UNBOUND_VARIABLES_IN_RSP_QL_QUERY_BODY = + "divide.engine.parser.validate_unbound_variables_in_rsp-ql_query_body"; + + private static final String DIVIDE_REASONER_HANDLE_TBOX_DEFINITIONS_IN_CONTEXT = + "divide.reasoner.handle_tbox_definitions_in_context"; + + private static final String DIVIDE_ONTOLOGY_DIRECTORY = "divide.ontology.dir"; + private static final String DIVIDE_ONTOLOGY = "divide.ontology"; + + private static final String DIVIDE_QUERIES = "divide.queries"; + private static final String DIVIDE_QUERIES_AS_SPARQL = "divide.queries.sparql"; + private static final String DIVIDE_QUERIES_AS_RSP_QL = "divide.queries.rspql"; + + private final Configuration config; + private final String 
configFileDirectory; + + private DivideConfig(String propertiesFilePath) throws ConfigurationException { + config = new PropertiesConfiguration(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + /** + * Creates a DIVIDE config object based on the given properties file. + * + * @param propertiesFile path to properties file + * @return an instantiated DIVIDE config object which can be used to retrieve + * the configuration parameters of the DIVIDE server + * @throws ConfigurationException if the properties file does not exist or is invalid + */ + public static DivideConfig getInstance(String propertiesFile) + throws ConfigurationException { + return new DivideConfig(propertiesFile); + } + + /** + * @return host on which the DIVIDE API & Knowledge Base API should be exposed + * (default: 'localhost') + */ + public String getHost() { + return config.getString(DIVIDE_SERVER_HOST, "localhost"); + } + + /** + * @return port on which the DIVIDE API should be exposed (default: 5000) + */ + public int getDivideServerPort() { + return config.getInt(DIVIDE_SERVER_PORT_DIVIDE, 5000); + } + + /** + * @return port on which the Knowledge Base API should be exposed + * (default: 5001) + */ + public int getKnowledgeBaseServerPort() { + return config.getInt(DIVIDE_SERVER_PORT_KB, 5001); + } + + /** + * @return {@link KnowledgeBaseType} representing the type of knowledge base + * that should be instantiated for the DIVIDE engine + * (default: {@link KnowledgeBaseType#JENA}) + */ + public KnowledgeBaseType getKnowledgeBaseType() { + KnowledgeBaseType defaultType = KnowledgeBaseType.JENA; + KnowledgeBaseType configType = + KnowledgeBaseType.fromString(config.getString(DIVIDE_KB_TYPE)); + return configType != null ? 
configType : defaultType; + } + + /** + * @return base IRI of the knowledge base that should be used for resolving + * IRIs used within the context of the started DIVIDE engine + * (default: 'http://idlab.ugent.be/divide') + */ + public String getBaseIriOfKnowledgeBase() { + return config.getString(DIVIDE_KB_BASE_IRI, "http://idlab.ugent.be/divide"); + } + + /** + * @return whether DIVIDE should pause RSP engine streams on a component + * when context changes are detected that trigger the DIVIDE query + * derivation for that component (default: true) + */ + public boolean shouldStopRspEngineStreamsOnContextChanges() { + return config.getBoolean(DIVIDE_ENGINE_STOP_RSP_ENGINE_STREAMS_ON_CONTEXT_CHANGES, true); + } + + /** + * @return whether the DIVIDE query parser should process unmapped variable matches in the + * query input (i.e., identical variable names occurring in both the stream and + * final query, that are not explicitly defined in the variable mapping file of + * the input of that query) - if true, matching variable names are considered as + * variable mappings; if false, matching variable names are considered coincidence + * and are not treated as mappings (default: false) + */ + public boolean shouldProcessUnmappedVariableMatchesInParser() { + return config.getBoolean(DIVIDE_ENGINE_PARSER_PROCESS_UNMAPPED_VARIABLE_MATCHES, false); + } + + /** + * @return whether the DIVIDE query parser should validate variables in the RSP-QL query + * body generated by the DIVIDE query parser, should be validated (= checked for occurrence + * in the WHERE clause of the query or in the set of input variables that will be substituted + * during the DIVIDE query derivation) during parsing (default: true) + */ + public boolean shouldValidateUnboundVariablesInRspQlQueryBodyInParser() { + return config.getBoolean( + DIVIDE_ENGINE_PARSER_VALIDATE_UNBOUND_VARIABLES_IN_RSP_QL_QUERY_BODY, true); + } + + /** + * @return whether DIVIDE should allow to specify TBox definitions in 
the + * context updates sent for the query derivation; if true, this means + * that DIVIDE should scan all context updates for new OWL-RL axioms + * and rules upon each query derivation call, heavily impacting the + * derivation of queries upon context updates (default: false) + */ + public boolean shouldHandleTBoxDefinitionsInContext() { + return config.getBoolean(DIVIDE_REASONER_HANDLE_TBOX_DEFINITIONS_IN_CONTEXT, false); + } + + /** + * @return list of canonical path names of files containing the ontology (TBox) data + * used by this DIVIDE engine (default: empty list) + */ + public List getDivideOntologyFilePaths() { + String ontologyDirectoryPath = config.getString(DIVIDE_ONTOLOGY_DIRECTORY, "."); + if (ontologyDirectoryPath != null && !Paths.get(ontologyDirectoryPath).isAbsolute()) { + ontologyDirectoryPath = Paths.get(configFileDirectory, ontologyDirectoryPath).toString(); + } + + String[] ontologyFileNames = config.getStringArray(DIVIDE_ONTOLOGY); + + List ontologyFilePaths = new ArrayList<>(); + for (String ontologyFileName : ontologyFileNames) { + try { + File ontologyFile = new File(ontologyFileName); + if (!ontologyFile.isAbsolute()) { + ontologyFile = new File(ontologyDirectoryPath, ontologyFileName); + } + + String canonicalPath = ontologyFile.getCanonicalPath(); + if (IOUtilities.isValidFile(canonicalPath)) { + ontologyFilePaths.add(canonicalPath); + } else { + throw new IOException(String.format("%s is not a valid file", canonicalPath)); + } + } catch (IOException e) { + LOGGER.error("Error with finding ontology file {}", ontologyFileName, e); + throw new RuntimeException(e); + } + } + + return ontologyFilePaths; + } + + /** + * @return list of path names of files that each specify the properties of a + * single DIVIDE query that should be loaded into the DIVIDE engine; + * these properties can be read with the {@link DivideQueryConfig} class + * (default: empty list) + */ + public List getDivideQueryPropertiesFiles() { + String[] queries = 
config.getStringArray(DIVIDE_QUERIES); + if (queries == null) { + return new ArrayList<>(); + } else { + return Arrays.stream(queries) + .filter(Objects::nonNull) + .map(path -> !Paths.get(path).isAbsolute() ? + Paths.get(configFileDirectory, path).toString() : path) + .collect(Collectors.toList()); + } + } + + /** + * @return list of path names of files that each specify the properties of a + * single DIVIDE query that should be loaded into the DIVIDE engine; + * the specification is based on a series of 1 or more chained SPARQL queries; + * these properties can be read with the {@link DivideQueryAsRspQlOrSparqlConfig} + * class (default: empty list) + */ + public List getDivideQueryAsSparqlPropertiesFiles() { + String[] queries = config.getStringArray(DIVIDE_QUERIES_AS_SPARQL); + if (queries == null) { + return new ArrayList<>(); + } else { + return Arrays.stream(queries) + .filter(Objects::nonNull) + .map(path -> !Paths.get(path).isAbsolute() ? + Paths.get(configFileDirectory, path).toString() : path) + .collect(Collectors.toList()); + } + } + + /** + * @return list of path names of files that each specify the properties of a + * single DIVIDE query that should be loaded into the DIVIDE engine; + * the specification is based on a single RSP-QL query; + * these properties can be read with the {@link DivideQueryAsRspQlOrSparqlConfig} + * class (default: empty list) + */ + public List getDivideQueryAsRspQlPropertiesFiles() { + String[] queries = config.getStringArray(DIVIDE_QUERIES_AS_RSP_QL); + if (queries == null) { + return new ArrayList<>(); + } else { + return Arrays.stream(queries) + .filter(Objects::nonNull) + .map(path -> !Paths.get(path).isAbsolute() ? 
+ Paths.get(configFileDirectory, path).toString() : path) + .collect(Collectors.toList()); + } + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryAsRspQlOrSparqlConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryAsRspQlOrSparqlConfig.java new file mode 100644 index 0000000..ebe2be6 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryAsRspQlOrSparqlConfig.java @@ -0,0 +1,133 @@ +package be.ugent.idlab.divide.configuration.legacy; + +import be.ugent.idlab.divide.core.query.parser.StreamWindow; +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.io.FilenameUtils; + +import java.io.File; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Configuration of a single DIVIDE query that should be added to the DIVIDE + * engine upon start-up. 
+ */ +@SuppressWarnings("unused") +public class DivideQueryAsRspQlOrSparqlConfig { + + private static final Pattern STREAM_WINDOW_PATTERN = Pattern.compile( + "(<[^<>]+>)<([^<>]+)><([^<>]+)>"); + + private static final String STREAM_GRAPH_NAMES = "stream-graph-names"; + private static final String STREAM_QUERY = "stream-query"; + private static final String INTERMEDIATE_QUERIES = "intermediate-queries"; + private static final String FINAL_QUERY = "final-query"; + private static final String SOLUTION_MODIFIER = "solution-modifier"; + private static final String STREAM_TO_FINAL_QUERY_VARIABLE_MAPPING = "stream-to-final-query-variable-mapping"; + private static final String CONTEXT_ENRICHMENT = "context-enrichment"; + + protected final Configuration config; + private final String queryName; + private final String configFileDirectory; + + private DivideQueryAsRspQlOrSparqlConfig(String propertiesFilePath) throws ConfigurationException { + config = new PropertiesConfiguration(propertiesFilePath); + queryName = FilenameUtils.getBaseName(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + /** + * Creates a DIVIDE query config object based on the given properties file. 
+ * + * @param propertiesFile path to query properties file + * @return an instantiated DIVIDE query config object which can be used to + * retrieve the configuration parameters of the DIVIDE query + * @throws ConfigurationException if the properties file does not exist or is invalid + */ + public static DivideQueryAsRspQlOrSparqlConfig getInstance(String propertiesFile) + throws ConfigurationException { + return new DivideQueryAsRspQlOrSparqlConfig(propertiesFile); + } + + /** + * @return the name of the DIVIDE query, + * which equals based on the name of the properties file + */ + public String getQueryName() { + return queryName; + } + + public List getStreamGraphNames() { + String[] streamWindowStrings = config.getStringArray(STREAM_GRAPH_NAMES); + List result = new ArrayList<>(); + if (streamWindowStrings != null) { + for (String streamWindowString : streamWindowStrings) { + Matcher m = STREAM_WINDOW_PATTERN.matcher(streamWindowString); + if (m.find()) { + result.add(new StreamWindow(m.group(1), String.format("%s %s", m.group(2), m.group(3)))); + } else { + // invalid entry, so null returned + return null; + } + } + } + return result; + } + + public String getStreamQueryFilePath() { + String path = config.getString(STREAM_QUERY, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + public List getIntermediateQueryFilePaths() { + String[] queries = config.getStringArray(INTERMEDIATE_QUERIES); + List queryList = queries == null ? 
new ArrayList<>() : Arrays.asList(queries); + List result = new ArrayList<>(); + for (String path : queryList) { + if (path != null && !path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + result.add(path); + } + return result; + } + + public String getFinalQueryFilePath() { + String path = config.getString(FINAL_QUERY, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + public String getSolutionModifier() { + return config.getString(SOLUTION_MODIFIER, null); + } + + public String getStreamToFinalQueryVariableMappingFilePath() { + String path = config.getString(STREAM_TO_FINAL_QUERY_VARIABLE_MAPPING, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + public String getContextEnrichmentFilePath() { + String path = config.getString(CONTEXT_ENRICHMENT, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryConfig.java new file mode 100644 index 0000000..501d284 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryConfig.java @@ -0,0 +1,100 @@ +package be.ugent.idlab.divide.configuration.legacy; + +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.io.FilenameUtils; + +import java.io.File; +import java.nio.file.Paths; + +/** + * Configuration of a single DIVIDE query that 
should be added to the DIVIDE + * engine upon start-up. + */ +@SuppressWarnings("unused") +public class DivideQueryConfig { + + private static final String QUERY_PATTERN = "query-pattern"; + private static final String SENSOR_QUERY_RULE = "sensor-query-rule"; + private static final String GOAL = "goal"; + + private static final String CONTEXT_ENRICHMENT = "context-enrichment"; + + private final Configuration config; + private final String queryName; + private final String configFileDirectory; + + private DivideQueryConfig(String propertiesFilePath) throws ConfigurationException { + config = new PropertiesConfiguration(propertiesFilePath); + queryName = FilenameUtils.getBaseName(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + /** + * Creates a DIVIDE query config object based on the given properties file. + * + * @param propertiesFile path to query properties file + * @return an instantiated DIVIDE query config object which can be used to + * retrieve the configuration parameters of the DIVIDE query + * @throws ConfigurationException if the properties file does not exist or is invalid + */ + public static DivideQueryConfig getInstance(String propertiesFile) + throws ConfigurationException { + return new DivideQueryConfig(propertiesFile); + } + + /** + * @return the name of the DIVIDE query, + * which equals based on the name of the properties file + */ + public String getQueryName() { + return queryName; + } + + /** + * @return path to query pattern file specified in DIVIDE query config file, + * or null if not specified + */ + public String getQueryPatternFilePath() { + String path = config.getString(QUERY_PATTERN, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + /** + * @return path to sensor query rule file specified in DIVIDE query config file, + * or null if not specified + 
*/ + public String getSensorQueryRuleFilePath() { + String path = config.getString(SENSOR_QUERY_RULE, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + /** + * @return path to goal file specified in DIVIDE query config file, + * or null if not specified + */ + public String getGoalFilePath() { + String path = config.getString(GOAL, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + + public String getContextEnrichmentFilePath() { + String path = config.getString(CONTEXT_ENRICHMENT, ""); + if (!path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + return path; + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryContextEnrichmentConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryContextEnrichmentConfig.java new file mode 100644 index 0000000..2b8dc03 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideQueryContextEnrichmentConfig.java @@ -0,0 +1,50 @@ +package be.ugent.idlab.divide.configuration.legacy; + +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; + +import java.io.File; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +@SuppressWarnings("unused") +public class DivideQueryContextEnrichmentConfig { + + private static final String DO_REASONING = "do-reasoning"; + private static final String EXECUTE_ON_ONTOLOGY_TRIPLES = "execute-on-ontology-triples"; + private static final String QUERIES = "queries"; + + protected final 
Configuration config; + private final String configFileDirectory; + + public DivideQueryContextEnrichmentConfig(String propertiesFilePath) throws ConfigurationException { + config = new PropertiesConfiguration(propertiesFilePath); + configFileDirectory = new File(propertiesFilePath) + .getAbsoluteFile().getParentFile().getAbsolutePath(); + } + + public boolean executeOnOntologyTriples() { + return config.getBoolean(EXECUTE_ON_ONTOLOGY_TRIPLES, true); + } + + public boolean doReasoning() { + return config.getBoolean(DO_REASONING, true); + } + + public List getQueryFilePaths() { + String[] queries = config.getStringArray(QUERIES); + List queryList = queries == null ? new ArrayList<>() : Arrays.asList(queries); + List result = new ArrayList<>(); + for (String path : queryList) { + if (path != null && !path.isEmpty() && !Paths.get(path).isAbsolute()) { + path = Paths.get(configFileDirectory, path).toString(); + } + result.add(path); + } + return result; + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideVariableMappingConfig.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideVariableMappingConfig.java new file mode 100644 index 0000000..ed6c7f3 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/legacy/DivideVariableMappingConfig.java @@ -0,0 +1,35 @@ +package be.ugent.idlab.divide.configuration.legacy; + +import org.apache.commons.configuration.Configuration; +import org.apache.commons.configuration.ConfigurationException; +import org.apache.commons.configuration.PropertiesConfiguration; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +@SuppressWarnings("unused") +public class DivideVariableMappingConfig { + + private final Configuration config; + + public DivideVariableMappingConfig(String propertiesFilePath) throws ConfigurationException { + config = new 
PropertiesConfiguration(propertiesFilePath); + } + + public Map getVariableMapping() throws ConfigurationException { + Map mapping = new HashMap<>(); + Iterator it = config.getKeys(); + while (it.hasNext()) { + String key = it.next(); + String value = config.getString(key, null); + if (value == null) { + throw new ConfigurationException( + "Variable mapping file can only contain string values"); + } + mapping.put(key, value); + } + return mapping; + } + +} diff --git a/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/util/CustomJsonConfiguration.java b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/util/CustomJsonConfiguration.java new file mode 100644 index 0000000..09741d8 --- /dev/null +++ b/src/divide-central/divide-server/src/main/java/be/ugent/idlab/divide/configuration/util/CustomJsonConfiguration.java @@ -0,0 +1,64 @@ +package be.ugent.idlab.divide.configuration.util; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.type.MapType; +import org.apache.commons.configuration2.JSONConfiguration; +import org.apache.commons.configuration2.ex.ConfigurationException; + +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.io.Writer; +import java.util.Map; + +public class CustomJsonConfiguration extends JSONConfiguration { + + private final ObjectMapper mapper = new ObjectMapper(); + private final MapType type; + + public CustomJsonConfiguration(String filePath) throws FileNotFoundException, ConfigurationException { + this.mapper.configure(JsonParser.Feature.ALLOW_COMMENTS, true); + this.type = this.mapper.getTypeFactory().constructMapType( + Map.class, String.class, Object.class); + + read(new FileReader(filePath)); + } + + @Override + public void read(Reader in) throws ConfigurationException { + try { 
+ Map map = this.mapper.readValue(in, this.type); + this.load(map); + } catch (Exception var3) { + rethrowException(var3); + } + } + + @Override + public void write(Writer out) throws ConfigurationException, IOException { + this.mapper.writer().writeValue(out, + this.constructMap(this.getNodeModel().getNodeHandler().getRootNode())); + } + + @Override + public void read(InputStream in) throws ConfigurationException { + try { + Map map = this.mapper.readValue(in, this.type); + this.load(map); + } catch (Exception var3) { + rethrowException(var3); + } + } + + static void rethrowException(Exception e) throws ConfigurationException { + if (e instanceof ClassCastException) { + throw new ConfigurationException("Error parsing", e); + } else { + throw new ConfigurationException("Unable to load the configuration", e); + } + } + +} diff --git a/src/divide-central/pom.xml b/src/divide-central/pom.xml new file mode 100644 index 0000000..c15a26e --- /dev/null +++ b/src/divide-central/pom.xml @@ -0,0 +1,26 @@ + + + 4.0.0 + + be.ugent.idlab + divide + 1.0 + + divide-engine + divide-server + divide-monitor + divide-api + divide-eye + divide-query-derivation + + + pom + + + UTF-8 + 1.8 + 1.8 + + \ No newline at end of file diff --git a/swj2022/README.md b/swj2022/README.md index fced2f8..d6eaf8c 100644 --- a/swj2022/README.md +++ b/swj2022/README.md @@ -1,6 +1,6 @@ -# Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE +# Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design -This folder contains all supportive material related to the paper "Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. 
+This folder contains all supportive material related to the paper "Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. ## Contents @@ -12,6 +12,9 @@ This folder contains three subfolders: * [`evaluations`](evaluations): This folder contains supportive material related to the evaluations performed in the paper. * [`eye-implementation`](eye-implementation): This folder contains some more details concerning the implementation of the initialization and query derivation of DIVIDE with the EYE reasoner. +Moreover, this folder contains the different versions of the paper that have been submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. +* [paper_v1_submitted_2022-05-01.pdf](paper_v1_submitted_2022-05-01.pdf): This PDF represents the original version of the paper, which was submitted on 1 May 2022. It contains additional details about the DIVIDE methodology and the use case scenario, which have been removed in the first revision of the paper. + ## Contact The main contact person directly involved with this research is [Mathias De Brouwer](https://www.linkedin.com/in/mathiasdebrouwer/). In case of any remarks or questions, you can email [mrdbrouw.DeBrouwer@UGent.be](mailto:mrdbrouw.DeBrouwer@UGent.be) or [create a GitHub issue](../../../issues/new). 
diff --git a/swj2022/evaluations/README.md b/swj2022/evaluations/README.md index ac64522..4677ea3 100644 --- a/swj2022/evaluations/README.md +++ b/swj2022/evaluations/README.md @@ -1,6 +1,6 @@ -# Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE +# Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design -This folder contains supportive material for the evaluations in the paper "Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. +This folder contains supportive material for the evaluations in the paper "Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. ## Contents @@ -9,7 +9,11 @@ The folder contains supportive material for the following evaluations: * [`divide-performance`](divide-performance): The material in this folder is related to the performance evaluation of DIVIDE. It corresponds to the evaluation set-up and results in the Sections 8.1 and 9.1 of the paper ("Performance evaluation of DIVIDE"). * [`real-time-comparison`](real-time-comparison): The material in this folder is related to the real-time evaluation of the DIVIDE approach, compared to other alternative approaches that use real-time semantic reasoning. It corresponds to the evaluation set-up and results in the Sections 8.3 and 9.2-3 of the paper ("Real-time evaluation of derived DIVIDE queries"). -The context used for the evaluations is represented by the [`context.ttl`](context.ttl) file. This file contains all context triples in RDF/Turtle syntax. +The context used for the evaluations is represented by the [`context.ttl`](context.ttl) file. 
This file contains all context triples in RDF/Turtle syntax. + +The [`divide-server-1.0-jar-with-dependencies.jar`](divide-server-1.0-jar-with-dependencies.jar) file represents the compiled Java JAR of the DIVIDE server module used for the evaluations in the paper. The corresponding source code can be found in the [`src/divide-central`](../../src/divide-central) folder of this repository. The version of the source code to build the given Java JAR (and thus the version used for the evaluations) is tagged with the 'swj-2022' tag (see [tag page](../../../../tags)). + +The realistic dataset, collected in the imec-UGent HomeLab and used in the paper to extract the activity rules for this evaluation and to create the simulation dataset, is publicly available on the DAHCC ontology website via [this link](https://dahcc.idlab.ugent.be/dataset.html). ## Contact diff --git a/swj2022/evaluations/divide-performance/README.md b/swj2022/evaluations/divide-performance/README.md index 97f4ab6..d79b898 100644 --- a/swj2022/evaluations/divide-performance/README.md +++ b/swj2022/evaluations/divide-performance/README.md @@ -1,6 +1,6 @@ -# Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE +# Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design -This folder contains supportive material for the evaluations in the paper "Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. +This folder contains supportive material for the evaluations in the paper "Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. 
## Contents @@ -8,6 +8,8 @@ The material in this folder is related to the performance evaluation of the DIVI The [`divide-queries`](divide-queries) subfolder contains the configuration details of the DIVIDE query definitions that are being used in this evaluation. These include the DIVIDE queries corresponding to the toileting and brushing teeth activity rules. For both activities, the root folder of the corresponding DIVIDE query contains the internal representation of this DIVIDE query. For the toileting query, the end-user definition (as a series of SPARQL queries) is also included in the `sparql` subfolder. +Note that the DIVIDE query for the showering activity (used in the real-time comparison evaluation) is the same DIVIDE query as the one for the toileting query (as explained in the paper). + ## Contact The main contact person directly involved with this research is [Mathias De Brouwer](https://www.linkedin.com/in/mathiasdebrouwer/). In case of any remarks or questions, you can email [mrdbrouw.DeBrouwer@UGent.be](mailto:mrdbrouw.DeBrouwer@UGent.be) or [create a GitHub issue](../../../../../issues/new). 
diff --git a/swj2022/evaluations/divide-server-1.0-jar-with-dependencies.jar b/swj2022/evaluations/divide-server-1.0-jar-with-dependencies.jar new file mode 100644 index 0000000..cd1b07a Binary files /dev/null and b/swj2022/evaluations/divide-server-1.0-jar-with-dependencies.jar differ diff --git a/swj2022/evaluations/real-time-comparison/README.md b/swj2022/evaluations/real-time-comparison/README.md index b5e8578..85cbbb8 100644 --- a/swj2022/evaluations/real-time-comparison/README.md +++ b/swj2022/evaluations/real-time-comparison/README.md @@ -1,6 +1,6 @@ -# Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE +# Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design -This folder contains supportive material for the evaluations in the paper "Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. +This folder contains supportive material for the evaluations in the paper "Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. 
## Contents diff --git a/swj2022/eye-implementation/README.md b/swj2022/eye-implementation/README.md index 05c780f..9fe75e4 100644 --- a/swj2022/eye-implementation/README.md +++ b/swj2022/eye-implementation/README.md @@ -1,6 +1,6 @@ -# Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE +# Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design -This folder contains DIVIDE implementation details related to the paper "Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. +This folder contains DIVIDE implementation details related to the paper "Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. ## Contents diff --git a/swj2022/ontology/README.md b/swj2022/ontology/README.md index 67936ab..89b51b2 100644 --- a/swj2022/ontology/README.md +++ b/swj2022/ontology/README.md @@ -1,6 +1,6 @@ -# Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE +# Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design -This folder contains the ontology data related to the paper "Context-aware & privacy-preserving homecare monitoring through adaptive query derivation for IoT data streams with DIVIDE", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. 
+This folder contains the ontology data related to the paper "Context-aware query derivation for IoT data streams with DIVIDE enabling privacy by design", which is submitted to the Special Issue on Semantic Web Meets Health Data Management of the Semantic Web Journal. ## Contents @@ -8,8 +8,8 @@ In the paper, DIVIDE is explained through a running homecare monitoring example. The Activity Recognition ontology contains two parts: -- A snapshot of the [DAHCC ontology](https://github.com/predict-idlab/DAHCC-Sources). This ontology contains definitions to perform Data Analytics in Health and Connected Care. The files used for this paper are: - - The general ontology files included in the `Ontology` folder of the DAHCC GitHub repo. Note that this folder contains the RDF/Turtle representation of these ontology files. +- A snapshot of the [DAHCC ontology](https://github.com/predict-idlab/DAHCC-Sources). This ontology contains definitions to perform Data Analytics in Health and Connected Care. More information about the DAHCC ontology is available via [this website](https://dahcc.idlab.ugent.be). The files used for this paper are: + - The general ontology files included in the `Ontology` folder of the DAHCC GitHub repo. Note that the current repository contains the RDF/Turtle representation of these ontology files. - The TBox definitions extracted from the `_Homelab.owl` and `_HomelabWearable.owl` files in the `instantiated_examples` folder of the DAHCC GitHub repo (in RDF/Turtle format). - All imports of the `imports` folder of the DAHCC GitHub repo that are being (indirectly) imported by any of the other included DAHCC files. - The additional ontology file [`KBActivityRecognition.ttl`](KBActivityRecognition.ttl) that represents all extra definitions related to the knowledge-driven activity recognition. 
diff --git a/swj2022/paper_v1_submitted_2022-05-01.pdf b/swj2022/paper_v1_submitted_2022-05-01.pdf new file mode 100644 index 0000000..cf37f7a Binary files /dev/null and b/swj2022/paper_v1_submitted_2022-05-01.pdf differ