Fixing service test cases
TebaleloS committed Jun 3, 2024
1 parent f5e3d4c · commit 6d8cbe3
Showing 5 changed files with 96 additions and 78 deletions.
@@ -23,7 +23,7 @@ CREATE OR REPLACE FUNCTION flows.get_flow_checkpoints(
OUT id_checkpoint UUID,
OUT checkpoint_name TEXT,
OUT measure_name TEXT,
OUT measure_columns TEXT[],
OUT measured_columns TEXT[],
OUT measurement_value JSONB,
OUT checkpoint_start_time TIMESTAMP WITH TIME ZONE,
OUT checkpoint_end_time TIMESTAMP WITH TIME ZONE
@@ -58,7 +58,7 @@ $$
-- id_checkpoint - id of retrieved checkpoint
-- checkpoint_name - name of retrieved checkpoint
-- measure_name - measure name associated with a given checkpoint
-- measure_columns - measure columns associated with a given checkpoint
-- measured_columns - measure columns associated with a given checkpoint
-- measurement_value - measurement details associated with a given checkpoint
-- checkpoint_time - time
--
@@ -83,7 +83,7 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{
assert(!queryResult.hasNext)
}

assert(table("runs.additional_data").count() == 4)
assert(table("runs.additional_data").count() == 3)
assert(table("runs.additional_data").count(add("fk_partitioning", fkPartitioning)) == 3)
assert(table("runs.additional_data_history").count(add("fk_partitioning", fkPartitioning)) == 1)

@@ -151,21 +151,21 @@ class CreateOrUpdateAdditionalDataIntegrationTests extends DBTestSuite{
assert(table("runs.additional_data").count(add("fk_partitioning", fkPartitioning)) == 5)
assert(table("runs.additional_data_history").count(add("fk_partitioning", fkPartitioning)) == 0)

val expectedDataInAdTable = Seq(
("PrimaryOwner", "TechnicalManagerX", "Bot"),
("SecondaryOwner", "AnalystY", "Bot"),
("SomeNewKey", "SomeNewValue", "MikeRusty"),
("IsDatasetInHDFS", "true", "MikeRusty"),
("DatasetContentSensitivityLevel", "1", "MikeRusty"),
)
expectedDataInAdTable.foreach { case (adNameExp, adValExp, adCreatedByExp) =>
table("runs.additional_data").where(add("ad_name", adNameExp)) {
resultSet =>
val row = resultSet.next()
assert(row.getString("ad_value").contains(adValExp))
assert(row.getString("created_by").contains(adCreatedByExp))
}
}
val expectedDataInAdTable = Seq(
("PrimaryOwner", "TechnicalManagerX", "Bot"),
("SecondaryOwner", "AnalystY", "Bot"),
("SomeNewKey", "SomeNewValue", "MikeRusty"),
("IsDatasetInHDFS", "true", "MikeRusty"),
("DatasetContentSensitivityLevel", "1", "MikeRusty"),
)
expectedDataInAdTable.foreach { case (adNameExp, adValExp, adCreatedByExp) =>
table("runs.additional_data").where(add("ad_name", adNameExp)) {
resultSet =>
val row = resultSet.next()
assert(row.getString("ad_value").contains(adValExp))
assert(row.getString("created_by").contains(adCreatedByExp))
}
}
}

test("Partitioning and AD present, but no new AD records were backed-up or inserted, no changes detected") {
server/src/test/scala/za/co/absa/atum/server/api/TestData.scala (133 changes: 76 additions & 57 deletions)
@@ -63,15 +63,15 @@ trait TestData {
checkpointName = None
)

// PartitioningSubmitDTO with different author
protected val partitioningSubmitDTO2: PartitioningSubmitDTO =
partitioningSubmitDTO1.copy(authorIfNew = "differentAuthor")

protected val partitioningSubmitDTO3: PartitioningSubmitDTO =
partitioningSubmitDTO1.copy(authorIfNew = "yetAnotherAuthor")

// Measure
protected val measureDTO1: MeasureDTO = MeasureDTO("count", Seq("1"))
protected val measureDTO2: MeasureDTO = MeasureDTO("count", Seq("*"))
protected val measureDTO1: MeasureDTO = MeasureDTO("count1", Seq("col_A1", "col_B1"))
protected val measureDTO2: MeasureDTO = MeasureDTO("count2", Seq("col_A2", "col_B2"))

// Additional Data
protected val additionalDataDTO1: AdditionalDataDTO = Map(
@@ -86,17 +86,17 @@
)
protected val additionalDataDTO3: AdditionalDataDTO = Map.empty

val mainValue = TypedValue(
value = "exampleMainValue",
valueType = String
val mainValue: TypedValue = TypedValue(
value = "123",
valueType = Long
)

val supportValue1 = TypedValue(
val supportValue1: TypedValue = TypedValue(
value = "123456789",
valueType = Long
)

val supportValue2 = TypedValue(
val supportValue2: TypedValue = TypedValue(
value = "12345.6789",
valueType = BigDecimal
)
@@ -120,7 +120,10 @@

// Measurement DTO
protected val measurementsDTO1: Seq[MeasurementDTO] = Seq(
MeasurementDTO(measureDTO1, measureResultDTO1),
MeasurementDTO(measureDTO1, measureResultDTO1)
)

protected val measurementsDTO2: Seq[MeasurementDTO] = Seq(
MeasurementDTO(measureDTO2, measureResultDTO2)
)

@@ -131,42 +131,84 @@ trait TestData {
"key3" -> Some("value3")
)

// Additional Data DTO as a map
val defaultJsonString: String =
"""
{
"value": 1,
"name": "default",
"details": {
"info": "defaultInfo"
}
}
"""
// Checkpoint DTO
protected val checkpointDTO1: CheckpointDTO = CheckpointDTO(
id = UUID.randomUUID(),
name = "name",
author = "author",
partitioning = checkpointQueryDTO1.partitioning,
processStartTime = ZonedDateTime.now(),
processEndTime = Some(ZonedDateTime.now()),
measurements = measurementsDTO1.toSet
)

val defaultJson: Json = parser.parse(defaultJsonString).getOrElse(Json.Null)
protected val checkpointDTO2: CheckpointDTO = CheckpointDTO(
id = UUID.randomUUID(),
name = "name2",
author = "author2",
partitioning = checkpointQueryDTO1.partitioning,
processStartTime = ZonedDateTime.now(),
processEndTime = Some(ZonedDateTime.now()),
measurements = measurementsDTO2.toSet
)

protected val checkpointDTO3: CheckpointDTO = checkpointDTO1.copy(id = UUID.randomUUID())

// CheckpointMeasurement DTO
// Additional Data DTO as a map
val defaultJsonString: String = """
|{
| "mainValue": {
| "value": "123",
| "valueType": "Long"
| },
| "supportValues": {
| "key1": {
| "value": "123456789",
| "valueType": "Long"
| },
| "key2": {
| "value": "12345.6789",
| "valueType": "BigDecimal"
| }
| }
|}
|""".stripMargin

protected val defaultJson: Json = parser.parse(defaultJsonString).getOrElse(throw new Exception("Failed to parse JSON"))

// Checkpoint from DB DTO
protected val checkpointFromDB1: CheckpointFromDB = CheckpointFromDB(
idCheckpoint = UUID.randomUUID(),
idCheckpoint = checkpointDTO1.id,
checkpointName = "name",
author = "author",
measureName = measureDTO1.measureName,
measuredColumns = Seq(measureDTO1.measuredColumns.toString()),
measuredColumns = Seq("col_A1", "col_B1"),
measurementValue = defaultJson,
checkpointStartTime = ZonedDateTime.now(),
checkpointEndTime = Some(ZonedDateTime.now())
checkpointStartTime = checkpointDTO1.processStartTime,
checkpointEndTime = checkpointDTO1.processEndTime
)

protected val checkpointFromDB2: CheckpointFromDB = CheckpointFromDB(
idCheckpoint = UUID.randomUUID(),
idCheckpoint = checkpointDTO2.id,
checkpointName = "name2",
author = "author2",
measureName = measureDTO2.measureName,
measuredColumns = Seq("col_A2", "col_B2"),
measurementValue = defaultJson,
checkpointStartTime = checkpointDTO2.processStartTime,
checkpointEndTime = checkpointDTO2.processEndTime
)

protected val checkpointFromDB3: CheckpointFromDB = CheckpointFromDB(
idCheckpoint = checkpointDTO1.id,
checkpointName = "name",
author = "author",
measureName = measureDTO2.measureName,
measuredColumns = Seq(measureDTO2.measuredColumns.toString()),
measuredByAtumAgent = true,
measureName = "cnt",
measuredColumns = Seq("col3_A", "col3_B"),
measurementValue = defaultJson,
checkpointStartTime = ZonedDateTime.now(),
checkpointEndTime = Some(ZonedDateTime.now())
checkpointStartTime = checkpointDTO3.processStartTime,
checkpointEndTime = None,
)

// Additional Data submit DTO
@@ -193,32 +193,6 @@ trait TestData {
measures = Set(MeasureDTO("count", Seq("1")))
)

// Checkpoint DTO
protected val checkpointDTO1: CheckpointDTO = CheckpointDTO(
id = UUID.randomUUID(),
name = "name",
author = "author",
partitioning = checkpointQueryDTO1.partitioning,
processStartTime = ZonedDateTime.now(),
processEndTime = None,
measurements = measurementsDTO1.toSet
)

protected val checkpointDTO2: CheckpointDTO = checkpointDTO1.copy(id = UUID.randomUUID())

protected val checkpointDTO4: CheckpointDTO = CheckpointDTO(
id = UUID.randomUUID(),
name = "name",
author = "author",
measuredByAtumAgent = true,
partitioning = Seq.empty,
processStartTime = ZonedDateTime.now(),
processEndTime = None,
measurements = Set.empty
)

protected val checkpointDTO3: CheckpointDTO = checkpointDTO1.copy(id = UUID.randomUUID())

protected def createAtumContextDTO(partitioningSubmitDTO: PartitioningSubmitDTO): AtumContextDTO = {
val measures: Set[MeasureDTO] = Set(MeasureDTO("count", Seq("*")))
val additionalData: AdditionalDataDTO = Map.empty
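
The reworked fixtures above tie each CheckpointFromDB value to its CheckpointDTO counterpart (shared id, processStartTime, and processEndTime) instead of generating fresh UUIDs and timestamps on both sides. A minimal sketch of why that matters for equality-based assertions follows; the two case classes are hypothetical stand-ins for illustration only, not the project's real DTOs.

import java.time.ZonedDateTime
import java.util.UUID

// Hypothetical stand-ins for CheckpointDTO / CheckpointFromDB, used only to
// demonstrate the fixture-sharing idea; field names are illustrative.
final case class CheckpointLike(id: UUID, processStartTime: ZonedDateTime)
final case class CheckpointRowLike(idCheckpoint: UUID, checkpointStartTime: ZonedDateTime)

object SharedFixtureSketch extends App {
  val dto = CheckpointLike(UUID.randomUUID(), ZonedDateTime.now())

  // Derive the DB-side fixture from the DTO rather than calling
  // UUID.randomUUID()/ZonedDateTime.now() a second time, so a later
  // "row converts back to exactly this DTO" assertion cannot flake.
  val row = CheckpointRowLike(idCheckpoint = dto.id, checkpointStartTime = dto.processStartTime)

  val roundTripped = CheckpointLike(row.idCheckpoint, row.checkpointStartTime)
  assert(roundTripped == dto) // deterministic: both sides share the same values
}

Deriving the DB-side fixture from the DTO in this way is what lets the controller and service tests below compare results against Seq(checkpointDTO1, checkpointDTO2) directly.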
@@ -45,7 +45,7 @@ object PartitioningControllerIntegrationTests extends ZIOSpecDefault with TestData
.thenReturn(ZIO.fail(ServiceError("boom!")))

when(partitioningServiceMock.getPartitioningCheckpoints(checkpointQueryDTO1))
.thenReturn(ZIO.succeed(Seq(checkpointDTO1, checkpointDTO4)))
.thenReturn(ZIO.succeed(Seq(checkpointDTO1, checkpointDTO2)))
when(partitioningServiceMock.getPartitioningCheckpoints(checkpointQueryDTO2))
.thenReturn(ZIO.succeed(Seq.empty))
when(partitioningServiceMock.getPartitioningCheckpoints(checkpointQueryDTO3))
@@ -82,7 +82,7 @@ object PartitioningControllerIntegrationTests extends ZIOSpecDefault with TestData
test("Returns expected Seq[MeasureDTO]") {
for {
result <- PartitioningController.getPartitioningCheckpoints(checkpointQueryDTO1)
} yield assertTrue(result == Seq(checkpointDTO1, checkpointDTO4))
} yield assertTrue(result == Seq(checkpointDTO1, checkpointDTO2))
},
test("Returns expected empty sequence") {
for {
@@ -133,7 +133,6 @@ class PartitioningServiceIntegrationTests extends ZIOSpecDefault with TestData {
for {
result <- PartitioningService.getPartitioningCheckpoints(checkpointQueryDTO1)
} yield assertTrue{
println("Results: ", result)
result.isInstanceOf[Seq[CheckpointDTO]]
result == Seq(checkpointDTO1, checkpointDTO2)
}
