Skip to content

Commit

Permalink
(CDAP-7150) Ignore missing Spark assembly jar if the workflow doesn’t…
Browse files Browse the repository at this point in the history
… use Spark. (#6548)
  • Loading branch information
chtyim authored and sreevatsan raman committed Aug 22, 2016
1 parent 75accdb commit 50c89ff
Showing 1 changed file with 14 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -105,11 +105,21 @@ protected ProgramController launch(Program program, ProgramOptions options,
// Adds the extra class that Spark runtime needed
ProgramRuntimeProvider provider = runtimeProviderLoader.get(ProgramType.SPARK);
if (provider != null) {
extraDependencies.add(provider.getClass());
try {
String sparkAssemblyJarName = SparkUtils.prepareSparkResources(tempDir, localizeResources);
// Localize the spark-assembly jar and spark conf zip
extraClassPaths.add(sparkAssemblyJarName);
extraDependencies.add(provider.getClass());
} catch (Exception e) {
if (driverMeta.hasSpark) {
// If the Workflow actually has spark, we can't ignore this error.
throw e;
}
// Otherwise, this can be ignored.
LOG.debug("Spark assembly jar is not present. It doesn't affect Workflow {} since it doesn't use Spark.",
program.getId(), e);
}

// Localize the spark-assembly jar and spark conf zip
String sparkAssemblyJarName = SparkUtils.prepareSparkResources(tempDir, localizeResources);
extraClassPaths.add(sparkAssemblyJarName);
} else if (driverMeta.hasSpark) {
// If the workflow contains spark and yet the spark runtime provider is missing, then it's an error.
throw new IllegalStateException("Missing Spark runtime system. Not able to run Spark program in Workflow.");
Expand Down

0 comments on commit 50c89ff

Please sign in to comment.