Merge pull request #111 from caraml-dev/spark-crd-support
feat(registry): Add support of memoryOverhead and coreLimit for Spark jobs
khorshuheng authored Nov 1, 2023
2 parents e0f8fd7 + 50b319d commit ef30a22
Showing 3 changed files with 10 additions and 0 deletions.
@@ -14,9 +14,11 @@ public class SparkDriverSpec {
   private List<V1Toleration> tolerations;
   private Integer cores;
   private String coreRequest;
+  private String coreLimit;
   private String mainApplicationFile;
   private String mainClass;
   private String memory;
+  private String memoryOverhead;
   private String javaOptions;
   private Map<String, String> labels;
   private String serviceAccount;
@@ -14,8 +14,10 @@ public class SparkExecutorSpec {
   private List<V1Toleration> tolerations;
   private Integer cores;
   private String coreRequest;
+  private String coreLimit;
   private Integer instances;
   private String memory;
+  private String memoryOverhead;
   private String javaOptions;
   private Map<String, String> labels;
   private Map<String, String> annotations;
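Taken together, the two spec changes above expose coreLimit and memoryOverhead on both the driver and the executor. Below is a minimal sketch, not part of this commit, of how a caller might populate them. It assumes the spec classes provide the usual setters for every field shown above (the test further down only exercises a subset of them), that SparkApplicationSpec exposes setDriver/setExecutor/setImage as it does in that test, and the buildExampleSpec helper name is ours; the values mirror the test.

// Hedged sketch, not part of the commit. Assumes plain setters exist for all
// fields shown in the diffs above.
SparkApplicationSpec buildExampleSpec() {
  SparkDriverSpec driver = new SparkDriverSpec();
  driver.setCores(1);
  driver.setMemory("1g");
  driver.setMemoryOverhead("250m");   // extra non-heap memory on top of the 1g driver memory
  driver.setCoreLimit("200m");        // hard CPU cap for the driver pod (setter assumed from the new field)

  SparkExecutorSpec executor = new SparkExecutorSpec();
  executor.setCores(2);
  executor.setCoreRequest("100m");    // CPU requested per executor pod
  executor.setCoreLimit("200m");      // CPU limit per executor pod
  executor.setMemory("2g");
  executor.setMemoryOverhead("250m"); // setter assumed from the new executor field

  SparkApplicationSpec spec = new SparkApplicationSpec();
  spec.setDriver(driver);
  spec.setExecutor(executor);
  spec.setImage("sparkImage:latest");
  return spec;
}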
@@ -161,8 +161,11 @@ public void shouldCreateStreamingJob() throws IOException, SparkOperatorApiExcep
   SparkExecutorSpec templateExecutorSpec = new SparkExecutorSpec();
   templateDriverSpec.setCores(1);
   templateDriverSpec.setMemory("1g");
+  templateDriverSpec.setMemoryOverhead("250m");
   templateDriverSpec.setLabels(Map.of("team", "${team}"));
   templateExecutorSpec.setCores(2);
+  templateExecutorSpec.setCoreRequest("100m");
+  templateExecutorSpec.setCoreLimit("200m");
   templateExecutorSpec.setMemory("2g");
   templateSparkApplicationSpec.setDriver(templateDriverSpec);
   templateSparkApplicationSpec.setExecutor(templateExecutorSpec);
@@ -205,10 +208,13 @@ public void shouldCreateStreamingJob() throws IOException, SparkOperatorApiExcep
   SparkDriverSpec expectedDriverSpec = new SparkDriverSpec();
   expectedDriverSpec.setCores(2);
   expectedDriverSpec.setMemory("1g");
+  expectedDriverSpec.setMemoryOverhead("250m");
   expectedDriverSpec.setLabels(Map.of("team", "some-team"));
   expectedSparkApplicationSpec.setDriver(expectedDriverSpec);
   SparkExecutorSpec expectedExecutorSpec = new SparkExecutorSpec();
   expectedExecutorSpec.setCores(2);
+  expectedExecutorSpec.setCoreRequest("100m");
+  expectedExecutorSpec.setCoreLimit("200m");
   expectedExecutorSpec.setMemory("3g");
   expectedSparkApplicationSpec.setExecutor(expectedExecutorSpec);
   expectedSparkApplicationSpec.setImage("sparkImage:latest");
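One note on the values the test uses, assuming these fields map onto the Spark operator's pod spec the way their names suggest: coreRequest and coreLimit are Kubernetes CPU quantities ("100m" is 0.1 CPU, "200m" is 0.2 CPU), while memory and memoryOverhead use Spark's JVM-style size suffixes ("1g" is 1 GiB, "250m" is 250 MiB). A small sketch below uses the Kubernetes Java client (already in use here via V1Toleration) to confirm the CPU arithmetic; the CpuQuantityExample class name is ours.

// Hedged sketch, not part of the commit: resolves the CPU quantity strings from
// the test with the Kubernetes Java client's Quantity type.
import io.kubernetes.client.custom.Quantity;

public class CpuQuantityExample {
  public static void main(String[] args) {
    Quantity request = Quantity.fromString("100m"); // executor coreRequest
    Quantity limit = Quantity.fromString("200m");   // executor coreLimit
    System.out.println(request.getNumber()); // 0.1, i.e. a tenth of a CPU core
    System.out.println(limit.getNumber());   // 0.2, i.e. a fifth of a CPU core
  }
}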
