Skip to content

Commit

Permalink
Merge pull request #255 from kbss-cvut/224-inject-logs-with-annotation
Browse files Browse the repository at this point in the history
224 inject logs with annotation
  • Loading branch information
blcham authored Aug 14, 2024
2 parents b74c7c8 + 0d7eaf9 commit cda1cdb
Show file tree
Hide file tree
Showing 90 changed files with 454 additions and 420 deletions.
7 changes: 7 additions & 0 deletions s-pipes-cli/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,11 @@
<artifactId>logback-core</artifactId>
<version>${ch.qos.logback.version}</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
</dependency>
<!-- IO utils -->
<dependency>
<groupId>commons-io</groupId>
Expand All @@ -72,6 +77,8 @@
</dependency>
</dependencies>



<build>
<plugins>
<plugin>
Expand Down
21 changes: 11 additions & 10 deletions s-pipes-cli/src/main/java/cz/cvut/spipes/cli/ExecuteModuleCLI.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import cz.cvut.spipes.manager.OntologyDocumentManager;
import cz.cvut.spipes.manager.SPipesScriptManager;
import cz.cvut.spipes.modules.Module;
import lombok.extern.slf4j.Slf4j;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.rdf.model.Model;
Expand All @@ -29,7 +30,7 @@
import java.util.Map;
import java.util.stream.Collectors;


@Slf4j
public class ExecuteModuleCLI {

// cat input-data.rdf | s-pipes execute --instance "<http://url>"
Expand All @@ -39,7 +40,7 @@ public class ExecuteModuleCLI {
// > output.data.rdf


private static final Logger LOG = LoggerFactory.getLogger(ExecuteModuleCLI.class);

private static final String DEFAULT_DELIMITER = ";";
//@Option(name = "-d", aliases = "--delimiter", metaVar = "DELIMITER", usage = "Input variables data delimiter ('" + DEFAULT_DELIMITER + "' by default)")
//private String delimiter = DEFAULT_DELIMITER;
Expand Down Expand Up @@ -98,32 +99,32 @@ public static void main(String[] args) throws IOException {
CmdLineUtils.parseCommandLine(args, argParser);

String output = String.join(" ", args);
LOG.info("Executing external module/function ... " + output);
log.info("Executing external module/function ... " + output);

// ----- load input model
Model inputDataModel = ModelFactory.createDefaultModel();

if (asArgs.inputDataFiles != null) {
for (File idFile : asArgs.inputDataFiles) {
LOG.debug("Loading input data from file {} ...", idFile);
log.debug("Loading input data from file {} ...", idFile);
inputDataModel.read(new FileInputStream(idFile), null, FileUtils.langTurtle);
}
}
if (asArgs.isInputDataFromStdIn) {
LOG.info("Loading input data from std-in ...");
log.info("Loading input data from std-in ...");
inputDataModel.read(System.in, null, FileUtils.langTurtle);
}

// ----- load modules and functions
LOG.debug("Loading scripts ...");
log.debug("Loading scripts ...");
SPipesScriptManager scriptManager = scriptManager = createSPipesScriptManager();
OntoDocManager.registerAllSPINModules();

// ----- load input bindings
VariablesBinding inputVariablesBinding = new VariablesBinding();
if (asArgs.inputBindingParametersMap != null) {
inputVariablesBinding = new VariablesBinding(transform(asArgs.inputBindingParametersMap));
LOG.info("Loaded input variable binding ={}", inputVariablesBinding);
log.info("Loaded input variable binding ={}", inputVariablesBinding);
}

// ----- create execution context
Expand All @@ -139,7 +140,7 @@ public static void main(String[] args) throws IOException {
}
ExecutionContext outputExecutionContext = engine.executePipeline(module, inputExecutionContext);

LOG.info("Processing successfully finished.");
log.info("Processing successfully finished.");
// outputExecutionContext.getDefaultModel().write(System.out);

// Model inputBindingModel = null;
Expand Down Expand Up @@ -265,7 +266,7 @@ private static SPipesScriptManager createSPipesScriptManager() {

// load from environment variables
String spipesOntologiesPathsStr = System.getenv(AppConstants.SYSVAR_SPIPES_ONTOLOGIES_PATH);
LOG.debug("Loading scripts from system variable {} = {}", AppConstants.SYSVAR_SPIPES_ONTOLOGIES_PATH, spipesOntologiesPathsStr);
log.debug("Loading scripts from system variable {} = {}", AppConstants.SYSVAR_SPIPES_ONTOLOGIES_PATH, spipesOntologiesPathsStr);
if (spipesOntologiesPathsStr != null) {
scriptPaths.addAll(
Arrays.stream(spipesOntologiesPathsStr.split(";"))
Expand Down Expand Up @@ -303,7 +304,7 @@ public static List<String> registerGlobalScripts(OntologyDocumentManager ontDocM
ontoUri -> {
String loc = locMapper.getAltEntry(ontoUri);
if (loc.endsWith(".sms.ttl")) {
LOG.info("Registering script from file " + loc + ".");
log.info("Registering script from file " + loc + ".");
_globalScripts.add(ontoUri);
}
}
Expand Down
1 change: 1 addition & 0 deletions s-pipes-core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@
<groupId>cz.cvut.kbss.jopa</groupId>
<artifactId>ontodriver-sesame</artifactId>
</dependency>

</dependencies>

<build>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,14 @@

class ExecutionEngineImpl implements ExecutionEngine {

private static Logger LOG = LoggerFactory.getLogger(ExecutionEngineImpl.class);
private static Logger log = LoggerFactory.getLogger(ExecutionEngineImpl.class);

private Set<ProgressListener> listeners = new HashSet<>();

private static int i = 0 ;

public ExecutionContext executePipeline(final Module module, final ExecutionContext inputContext) {
LOG.info("Executing script {} with context {}.", module.getResource(), inputContext.toSimpleString());
log.info("Executing script {} with context {}.", module.getResource(), inputContext.toSimpleString());
final long pipelineExecutionId = Instant.now().toEpochMilli()*1000+(i++);

fire((l) -> {l.pipelineExecutionStarted(pipelineExecutionId); return null;});
Expand All @@ -37,7 +37,7 @@ private void fire(final Function<ProgressListener,Void> function) {
try {
function.apply(listener);
} catch(final Exception e) {
LOG.warn("Listener {} failed.", listener, e);
log.warn("Listener {} failed.", listener, e);
}
});
}
Expand All @@ -58,17 +58,17 @@ private ExecutionContext _executePipeline(long pipelineExecutionId, Module modul
fire((l) -> {l.moduleExecutionStarted(pipelineExecutionId, moduleExecutionId, module, context, predecessorId); return null;});

if (module.getExecutionContext() != null) {
LOG.debug("Execution context for module {} already set.", module);
log.debug("Execution context for module {} already set.", module);
} else {
module.setInputContext(context);

LOG.info(" ##### " + module.getLabel());
if (LOG.isTraceEnabled()) {
LOG.trace("Using input context {}", context.toTruncatedSimpleString()); //TODO redundant code -> merge
log.info(" ##### " + module.getLabel());
if (log.isTraceEnabled()) {
log.trace("Using input context {}", context.toTruncatedSimpleString()); //TODO redundant code -> merge
}
ExecutionContext outputContext = module.execute();
if (LOG.isTraceEnabled()) {
LOG.trace("Returning output context {}", outputContext.toSimpleString());
if (log.isTraceEnabled()) {
log.trace("Returning output context {}", outputContext.toSimpleString());
}
module.addOutputBindings(context.getVariablesBinding());
}
Expand All @@ -80,18 +80,18 @@ private ExecutionContext _executePipeline(long pipelineExecutionId, Module modul
.collect(Collectors.toMap(Module::getResource, mod -> this._executePipeline(pipelineExecutionId, mod, context, moduleExecutionId)));


LOG.info(" ##### " + module.getLabel());
log.info(" ##### " + module.getLabel());
ExecutionContext mergedContext = mergeContexts(resource2ContextMap);
if (LOG.isTraceEnabled()) {
LOG.trace("Using input merged context {}", mergedContext.toTruncatedSimpleString());
if (log.isTraceEnabled()) {
log.trace("Using input merged context {}", mergedContext.toTruncatedSimpleString());
}
fire((l) -> {l.moduleExecutionStarted(pipelineExecutionId, moduleExecutionId, module, mergedContext, predecessorId); return null;});

module.setInputContext(mergedContext);

ExecutionContext outputContext = module.execute();
if (LOG.isTraceEnabled()) {
LOG.trace("Returning output context {}", outputContext.toSimpleString());
if (log.isTraceEnabled()) {
log.trace("Returning output context {}", outputContext.toSimpleString());
}
module.addOutputBindings(mergedContext.getVariablesBinding());
fire((l) -> {l.moduleExecutionFinished(pipelineExecutionId, moduleExecutionId, module); return null;});
Expand Down Expand Up @@ -128,7 +128,7 @@ private ExecutionContext mergeContexts(Map<Resource, ExecutionContext> resource2
VariablesBinding conflictingBinding = variablesBinding.extendConsistently(b);

if (! conflictingBinding.isEmpty()) {
LOG.warn("Module {} has conflicting variables binding {} with sibling modules ocurring in pipeline. ", modRes, context);
log.warn("Module {} has conflicting variables binding {} with sibling modules ocurring in pipeline. ", modRes, context);
}
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@

public class PipelineFactory {

private static final Logger LOG = LoggerFactory.getLogger(PipelineFactory.class);
private static final Logger log = LoggerFactory.getLogger(PipelineFactory.class);

// TODO inheritence not involved, not static context
static Map<Resource, Class<? extends Module>> moduleTypes = new HashMap<>();
Expand All @@ -54,12 +54,12 @@ public static void registerModuleType(Resource moduleType, Class<? extends Modul
}

/**
 * Records the mapping from a module type resource to the class that implements it,
 * so the factory can later instantiate modules of this type.
 */
private static void _registerModuleType(Resource type, Class<? extends Module> implementation) {
    log.info("     module: {} -> {}", type, implementation);
    moduleTypes.put(type, implementation);
}

/**
 * Registers an ARQ function implementation in Jena's global function registry
 * under the URI of the given function type resource.
 */
private static void _registerFunctionType(Resource type, Class<? extends ARQFunction> implementation) {
    log.info("     function: {} -> {}", type, implementation);
    FunctionRegistry.get().put(type.getURI(), implementation);
}

Expand Down Expand Up @@ -114,7 +114,7 @@ public static Module loadModule(@NotNull Resource moduleRes) {
// TODO multiple module types per resource
Resource moduleTypeRes = moduleRes.getPropertyResourceValue(RDF.type);
if (moduleTypeRes == null) {
LOG.error("Cannot load module {} as its {} property value is missing.", moduleRes, RDF.type);
log.error("Cannot load module {} as its {} property value is missing.", moduleRes, RDF.type);
return null;
}
return loadModule(moduleRes, moduleTypeRes);
Expand Down Expand Up @@ -173,7 +173,7 @@ private static Map<Resource, Module> loadAllModules(@NotNull Model configModel)
.map(st -> {
Module m = res2ModuleMap.get(st.getObject().asResource());
if (m == null) {
LOG.error("Ignoring statement {}. The object of the triple must have rdf:type {}.", st, SM.Module);
log.error("Ignoring statement {}. The object of the triple must have rdf:type {}.", st, SM.Module);
}
return m;
}).filter(m -> (m != null)).forEach(
Expand All @@ -193,7 +193,7 @@ private static Module loadModule(@NotNull Resource moduleRes, @NotNull Resource
Class<? extends Module> moduleClass = moduleTypes.get(moduleTypeRes);

if (moduleClass == null) {
LOG.error("Ignoring module {}. Its type {} is not registered.", moduleRes, moduleTypeRes);
log.error("Ignoring module {}. Its type {} is not registered.", moduleRes, moduleTypeRes);
return null;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public class VariablesBinding {

// TODO stream variables etc.

private static Logger LOG = LoggerFactory.getLogger(VariablesBinding.class);
private static Logger log = LoggerFactory.getLogger(VariablesBinding.class);
private static final int MAX_TRUNCATED_VALUE_SIZE = 300;
QuerySolutionMap binding = new QuerySolutionMap();

Expand All @@ -31,7 +31,7 @@ public VariablesBinding(QuerySolution querySolution) {
key -> {
RDFNode value = querySolution.get(key);
if (value == null) {
LOG.error("Ignoring variable binding with null value for the variable name \"{}\".", key);
log.error("Ignoring variable binding with null value for the variable name \"{}\".", key);
} else {
binding.add(key, value);
}
Expand Down Expand Up @@ -79,7 +79,7 @@ public VariablesBinding extendConsistently(VariablesBinding newVarsBinding) {

if ((oldNode != null) && (!oldNode.equals(newNode))) {
conflictingBinding.add(v, newNode);
LOG.warn("Variable \"{}\" have been bind to value \"{}\", ignoring assignment to value \"{}\".", v, oldNode, newNode);
log.warn("Variable \"{}\" have been bind to value \"{}\", ignoring assignment to value \"{}\".", v, oldNode, newNode);
} else {
this.add(v, newNode);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
import java.util.*;

public class AdvancedLoggingProgressListener implements ProgressListener {
private static final Logger LOG =
private static final Logger log =
LoggerFactory.getLogger(AdvancedLoggingProgressListener.class);
/**
* Maps pipeline executions and module executions to the transformation object.
Expand Down Expand Up @@ -173,7 +173,7 @@ private void persistPipelineExecutionFinished2(final EntityManager em, final lon

private void persistPipelineExecutionFinished(final EntityManager em, final long pipelineExecutionId) {
if (em.isOpen()) {
LOG.debug("Saving metadata about finished pipeline execution {}.", pipelineExecutionId);
log.debug("Saving metadata about finished pipeline execution {}.", pipelineExecutionId);
Date finishDate = new Date();
em.getTransaction().begin();

Expand Down Expand Up @@ -359,7 +359,7 @@ private void writeRawData(EntityManager em, URI contextUri, Model model) {
connection.getValueFactory().createIRI(contextUri.toString()));
connection.commit();
} catch (final RepositoryException | RDFParseException | RepositoryConfigException | IOException e) {
LOG.error(e.getMessage(), e);
log.error(e.getMessage(), e);
} finally {
if (connection != null && connection.isOpen()) {
connection.close();
Expand All @@ -386,7 +386,7 @@ private void saveModelToFile(String filePath, Model model) {
try (OutputStream fileIs = new FileOutputStream(file)) {
model.write(fileIs, FileUtils.langTurtle);
} catch (IOException e) {
LOG.error("Error during dataset snapshot saving.", e);
log.error("Error during dataset snapshot saving.", e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
import java.util.Set;

public class SemanticLoggingProgressListener implements ProgressListener {
private static final Logger LOG =
private static final Logger log =
LoggerFactory.getLogger(SemanticLoggingProgressListener.class);

/**
Expand Down Expand Up @@ -185,14 +185,14 @@ private String saveModelToFile(Path dir, String fileName, Model model) {
file =
Files.createFile(dir.resolve(TempFileUtils.createTimestampFileName(fileName))).toFile();
} catch (IOException e) {
LOG.error("Error during file creation.", e);
log.error("Error during file creation.", e);
return null;
}
try (OutputStream fileIs = new FileOutputStream(file)) {
model.write(fileIs, FileUtils.langTurtle);
return file.toURI().toURL().toString();
} catch (IOException e) {
LOG.error("Error during dataset snapshot saving.", e);
log.error("Error during dataset snapshot saving.", e);
return null;
}
}
Expand Down
Loading

0 comments on commit cda1cdb

Please sign in to comment.