
Commit

Merge remote-tracking branch 'origin/feature/ContestResultsMenu_improvements' into feature/ContestResultsMenu_improvements
sanyavertolet committed Aug 22, 2022
2 parents f0fba0a + 6c66b1b commit 72547ad
Showing 86 changed files with 2,167 additions and 868 deletions.
10 changes: 10 additions & 0 deletions .github/workflows/build-base-images.yaml
@@ -4,6 +4,12 @@ on:
schedule:
- cron: '0 4 * * 1'
workflow_dispatch:
inputs:
branch:
type: string
default: master
description: Branch to build images from
required: false

jobs:
build_base_images:
@@ -42,6 +48,10 @@ jobs:
base_image_tag: '3.10'
steps:
- uses: actions/checkout@v3
- if: github.event_name == 'workflow_dispatch'
name: Prepare to build from branch
run: |
git checkout origin/${{ inputs.branch }}
- uses: docker/login-action@v2
with:
registry: ghcr.io
@@ -17,6 +17,7 @@ import java.nio.file.Files
import java.nio.file.Paths

const val MYSQL_STARTUP_DELAY_MILLIS = 30_000L
const val KAFKA_STARTUP_DELAY_MILLIS = 5_000L

/**
* @param profile deployment profile, used, for example, to start SQL database in dev profile only
@@ -53,7 +54,28 @@ fun Project.createStackDeployTask(profile: String) {
| environment:
| - "MYSQL_ROOT_PASSWORD=123"
| - "MYSQL_DATABASE=save_cloud"
""".trimMargin()
| zookeeper:
| image: confluentinc/cp-zookeeper:latest
| environment:
| ZOOKEEPER_CLIENT_PORT: 2181
| ZOOKEEPER_TICK_TIME: 2000
| ports:
| - 22181:2181
|
| kafka:
| image: confluentinc/cp-kafka:latest
| depends_on:
| - zookeeper
| ports:
| - 29092:29092
| environment:
| KAFKA_BROKER_ID: 1
| KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
| KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
| KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
| KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
| KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
""".trimMargin()
} else if (profile == "dev" && it.trim().startsWith("logging:")) {
""
} else {
@@ -106,9 +128,11 @@ fun Project.createStackDeployTask(profile: String) {
Files.createDirectories(configsDir.resolve("orchestrator"))
Files.createDirectories(configsDir.resolve("preprocessor"))
}
description = "Deploy to docker swarm. If swarm contains more than one node, some registry for built images is required."
description =
"Deploy to docker swarm. If swarm contains more than one node, some registry for built images is required."
// this command puts env variables into compose file
val composeCmd = "docker-compose -f ${rootProject.buildDir}/docker-compose.yaml --env-file ${rootProject.buildDir}/.env config"
val composeCmd =
"docker-compose -f ${rootProject.buildDir}/docker-compose.yaml --env-file ${rootProject.buildDir}/.env config"
val stackCmd = "docker stack deploy --compose-file -" +
if (useOverride && composeOverride.exists()) {
" --compose-file ${composeOverride.canonicalPath}"
@@ -125,25 +149,43 @@
}

tasks.register<Exec>("stopDockerStack") {
description = "Completely stop all services in docker swarm. NOT NEEDED FOR REDEPLOYING! Use only to explicitly stop everything."
description =
"Completely stop all services in docker swarm. NOT NEEDED FOR REDEPLOYING! Use only to explicitly stop everything."
commandLine("docker", "stack", "rm", "save")
}

// in case you are running it on MAC, first do the following: docker pull --platform linux/x86_64 mysql
tasks.register<Exec>("startMysqlDb") {
tasks.register<Exec>("startMysqlDbService") {
dependsOn("generateComposeFile")
doFirst {
logger.lifecycle("Running the following command: [docker-compose --file $buildDir/docker-compose.yaml up -d mysql]")
}
commandLine("docker-compose", "--file", "$buildDir/docker-compose.yaml", "up", "-d", "mysql")
errorOutput = ByteArrayOutputStream()
doLast {
if (!errorOutput.toString().contains(" is up-to-date")) {
logger.lifecycle("Waiting $MYSQL_STARTUP_DELAY_MILLIS millis for mysql to start")
Thread.sleep(MYSQL_STARTUP_DELAY_MILLIS) // wait for mysql to start, can be manually increased when needed
}
logger.lifecycle("Waiting $MYSQL_STARTUP_DELAY_MILLIS millis for mysql to start")
Thread.sleep(MYSQL_STARTUP_DELAY_MILLIS) // wait for mysql to start, can be manually increased when needed
}
}
tasks.named("liquibaseUpdate") {
mustRunAfter("startMysqlDbService")
}
tasks.register("startMysqlDb") {
dependsOn("liquibaseUpdate")
dependsOn("startMysqlDbService")
}

tasks.register<Exec>("startKafka") {
dependsOn("generateComposeFile")
doFirst {
logger.lifecycle("Running the following command: [docker-compose --file $buildDir/docker-compose.yaml up -d kafka]")
}
errorOutput = ByteArrayOutputStream()
commandLine("docker-compose", "--file", "$buildDir/docker-compose.yaml", "up", "-d", "kafka")
doLast {
logger.lifecycle("Waiting $KAFKA_STARTUP_DELAY_MILLIS millis for kafka to start")
Thread.sleep(KAFKA_STARTUP_DELAY_MILLIS) // wait for kafka to start, can be manually increased when needed
}
finalizedBy("liquibaseUpdate")
}

tasks.register<Exec>("restartMysqlDb") {
@@ -152,10 +194,26 @@
finalizedBy("startMysqlDb")
}

tasks.register<Exec>("restartKafka") {
dependsOn("generateComposeFile")
commandLine("docker-compose", "--file", "$buildDir/docker-compose.yaml", "rm", "--force", "kafka")
commandLine("docker-compose", "--file", "$buildDir/docker-compose.yaml", "rm", "--force", "zookeeper")
finalizedBy("startKafka")
}

tasks.register<Exec>("deployLocal") {
dependsOn(subprojects.flatMap { it.tasks.withType<BootBuildImage>() })
dependsOn("startMysqlDb")
commandLine("docker-compose", "--file", "$buildDir/docker-compose.yaml", "up", "-d", "orchestrator", "backend", "preprocessor")
commandLine(
"docker-compose",
"--file",
"$buildDir/docker-compose.yaml",
"up",
"-d",
"orchestrator",
"backend",
"preprocessor"
)
}

val componentName = findProperty("save.component") as String?
@@ -166,7 +224,8 @@ fun Project.createStackDeployTask(profile: String) {
"and it should be a name of one of gradle subprojects. If component name is `save-backend`, then `save-frontend` will be built too" +
" and bundled into save-backend image."
require(componentName in allprojects.map { it.name }) { "Component name should be one of gradle subproject names, but was [$componentName]" }
val buildTask: TaskProvider<BootBuildImage> = project(componentName).tasks.named<BootBuildImage>("bootBuildImage")
val buildTask: TaskProvider<BootBuildImage> =
project(componentName).tasks.named<BootBuildImage>("bootBuildImage")
dependsOn(buildTask)
val serviceName = when (componentName) {
"save-backend", "save-orchestrator", "save-preprocessor" -> "save_${componentName.substringAfter("save-")}"
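
The compose fragment above advertises the broker on localhost:29092 through the PLAINTEXT_HOST listener, and the new startKafka task brings it up locally. A minimal sketch of a producer that checks that listener, assuming the stack is running and using the kafka-clients dependency added to the version catalog below; the topic name is a placeholder, not part of this commit:

import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.StringSerializer
import java.util.Properties

fun main() {
    // Connect through the PLAINTEXT_HOST listener advertised as localhost:29092 above
    val props = Properties().apply {
        put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:29092")
        put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer::class.java.name)
        put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer::class.java.name)
    }
    KafkaProducer<String, String>(props).use { producer ->
        // "smoke-test" is a placeholder topic; the cp-kafka image auto-creates topics by default
        producer.send(ProducerRecord("smoke-test", "key", "hello from save-cloud")).get()
    }
    println("Broker at localhost:29092 is reachable")
}
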
@@ -66,7 +66,7 @@ fun Project.configureVersioning() {
*/
fun Project.versionForDockerImages(): String =
(project.findProperty("dockerTag") as String? ?: version.toString())
.replace(Regex("[^._\\-a-z0-9]"), "-")
.replace(Regex("[^._\\-a-zA-Z0-9]"), "-")

/**
* Register task that reads version of save-cli, either from project property, or from Versions, or latest
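
The widened character class above keeps uppercase letters in Docker image tags instead of replacing them with '-'. A quick illustration of the new behaviour (not part of the commit), using this commit's branch name as input:

fun main() {
    val sanitize = { tag: String -> tag.replace(Regex("[^._\\-a-zA-Z0-9]"), "-") }
    // Previously every uppercase letter was replaced with '-'; now only truly disallowed characters are
    println(sanitize("feature/ContestResultsMenu_improvements"))
    // prints: feature-ContestResultsMenu_improvements
}
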
2 changes: 1 addition & 1 deletion diktat-analysis.yml
@@ -92,7 +92,7 @@
- name: TOO_LONG_FUNCTION
enabled: true
configuration:
maxFunctionLength: 35 # max length of function
maxFunctionLength: 55 # max length of function
isIncludeHeader: false # count function's header
- name: TOO_MANY_PARAMETERS
enabled: true
4 changes: 2 additions & 2 deletions docker-compose.yaml
@@ -84,7 +84,7 @@ services:
- "prometheus-job=api-gateway"
logging: *loki-logging-jvm
prometheus:
image: prom/prometheus:v2.37.0
image: prom/prometheus:v2.38.0
user: root # to access host's docker socket for service discovery, see https://groups.google.com/g/prometheus-users/c/EuEW0qRzXvg/m/0aqKh_ZABQAJ
ports:
- "9090:9090"
@@ -101,7 +101,7 @@ services:
constraints:
- "node.role==manager"
grafana:
image: grafana/grafana:9.0.7
image: grafana/grafana:9.1.0
ports:
- "9100:3000"
volumes:
8 changes: 8 additions & 0 deletions gradle/libs.versions.toml
@@ -12,6 +12,7 @@ spring-boot = "2.7.2"
spring-security = "5.7.2"
spring-cloud = "3.1.3"
spring-cloud-kubernetes = "2.1.3"
kafka-client = "3.2.0"
junit = "5.9.0"
diktat = "1.2.3"
detekt = "1.21.0"
@@ -92,9 +93,14 @@ spring-cloud-starter-gateway = { module = "org.springframework.cloud:spring-clou
spring-cloud-starter-kubernetes-client-config = { module = "org.springframework.cloud:spring-cloud-starter-kubernetes-client-config", version.ref = "spring-cloud-kubernetes" }
spring-boot-starter-oauth2-client = { module = "org.springframework.boot:spring-boot-starter-oauth2-client" }
spring-context-indexer = { module = "org.springframework:spring-context-indexer", version.ref = "spring" }
spring-kafka = {module = "org.springframework.kafka:spring-kafka"}
spring-kafka-test = {module = "org.springframework.kafka:spring-kafka-test"}
spring-web = { module = "org.springframework:spring-web", version.ref = "spring" }
jackson-module-kotlin = { module = "com.fasterxml.jackson.module:jackson-module-kotlin" }

kafka-clients = {module = "org.apache.kafka:kafka-clients", version.ref = "kafka-client"}
kafka212 = {module = "org.apache.kafka:kafka_2.12", version.ref = "kafka-client"}

springdoc-openapi-ui = { module = "org.springdoc:springdoc-openapi-ui", version.ref = "springdoc" }
springdoc-openapi-webflux-ui = { module = "org.springdoc:springdoc-openapi-webflux-ui", version.ref = "springdoc" }
springdoc-openapi-security = { module = "org.springdoc:springdoc-openapi-security", version.ref = "springdoc" }
@@ -107,6 +113,8 @@ ktor-client-core = { module = "io.ktor:ktor-client-core", version.ref = "ktor" }
ktor-client-curl = { module = "io.ktor:ktor-client-curl", version.ref = "ktor" }
ktor-client-logging = { module = "io.ktor:ktor-client-logging", version.ref = "ktor" }
ktor-client-serialization = { module = "io.ktor:ktor-client-serialization", version.ref = "ktor" }
ktor-client-content-negotiation = { module = "io.ktor:ktor-client-content-negotiation", version.ref = "ktor" }
ktor-serialization-kotlinx-json = { module = "io.ktor:ktor-serialization-kotlinx-json", version.ref = "ktor" }
ktor-client-mock = { module = "io.ktor:ktor-client-mock", version.ref = "ktor" }

# database
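
A sketch of how the new catalog entries could be consumed from a module's build script; the module and configuration choices are illustrative, not taken from this commit:

// build.gradle.kts of a module that needs Kafka support (illustrative)
dependencies {
    implementation(libs.spring.kafka)
    implementation(libs.kafka.clients)
    testImplementation(libs.spring.kafka.test)
}
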
6 changes: 4 additions & 2 deletions save-agent/README.md
@@ -1,5 +1,7 @@
# Building
Read the [official docs](https://github.com/JetBrains/kotlin/tree/master/kotlin-native#kotlinnative) and install all dependencies listed in [Prerequisites](https://github.com/JetBrains/kotlin/tree/master/kotlin-native#building-from-source) section.

On windows you'll also need to install msys2 and run `pacman -S mingw-w64-x86_64-curl` to have libcurl for ktor-client.
On ubuntu install `libcurl4-openssl-dev` for ktor client.
On Windows you'll also need to install msys2 and run `pacman -S mingw-w64-x86_64-curl` to have libcurl for ktor-client.
On ubuntu install `libcurl4-openssl-dev` for ktor client.

`save-agent` also requires `unzip` to be present on `$PATH`.
4 changes: 3 additions & 1 deletion save-agent/build.gradle.kts
@@ -33,7 +33,9 @@ kotlin {
implementation(libs.save.reporters)
implementation(libs.ktor.client.core)
implementation(libs.ktor.client.curl)
implementation(libs.ktor.client.serialization)
implementation(libs.ktor.client.content.negotiation)
implementation(libs.ktor.serialization.kotlinx.json)
implementation(libs.ktor.client.logging)
implementation(libs.kotlinx.serialization.properties)
implementation(libs.okio)
implementation(libs.kotlinx.datetime)
@@ -24,6 +24,7 @@ import kotlinx.serialization.Serializable
* @property cliCommand a command that agent will use to run SAVE cli
* @property debug whether debug logging should be enabled
* @property retry configuration for HTTP request retries
* @property testSuitesDir directory where tests and additional files need to be stored into
* @property logFilePath path to logs of save-cli execution
* @property save additional configuration for save-cli
*/
Expand All @@ -37,6 +38,7 @@ data class AgentConfiguration(
val retry: RetryConfig,
val debug: Boolean = false,
val cliCommand: String,
val testSuitesDir: String,
val logFilePath: String = "logs.txt",
val save: SaveCliConfig = SaveCliConfig(),
) {
@@ -68,12 +70,14 @@ data class HeartbeatConfig(
* @property additionalDataEndpoint endpoint to post additional data (version etc.) to
* @property executionDataEndpoint endpoint to post execution data to
* @property filesEndpoint endpoint to post debug info to
* @property testSourceSnapshotEndpoint endpoint to download test source snapshots from
*/
@Serializable
data class BackendConfig(
val url: String,
val additionalDataEndpoint: String = "internal/saveAgentVersion",
val executionDataEndpoint: String = "internal/saveTestResult",
val testSourceSnapshotEndpoint: String = "/internal/test-suites-sources/download-snapshot-by-execution-id",
val filesEndpoint: String = "internal/files",
)

@@ -4,18 +4,24 @@

package com.saveourtool.save.agent

import com.saveourtool.save.agent.utils.ktorLogger
import com.saveourtool.save.agent.utils.logDebugCustom
import com.saveourtool.save.agent.utils.logInfoCustom
import com.saveourtool.save.agent.utils.markAsExecutable
import com.saveourtool.save.agent.utils.readProperties
import com.saveourtool.save.core.config.LogType
import com.saveourtool.save.core.logging.describe
import com.saveourtool.save.core.logging.logType

import generated.SAVE_CLOUD_VERSION
import generated.SAVE_CORE_VERSION
import io.ktor.client.HttpClient
import io.ktor.client.plugins.HttpTimeout
import io.ktor.client.plugins.json.JsonPlugin
import io.ktor.client.plugins.kotlinx.serializer.KotlinxSerializer
import io.ktor.client.plugins.*
import io.ktor.client.plugins.contentnegotiation.*
import io.ktor.client.plugins.logging.*
import io.ktor.serialization.kotlinx.json.*
import okio.FileSystem
import okio.Path.Companion.toPath
import platform.posix.*

import kotlinx.cinterop.staticCFunction
@@ -42,6 +48,8 @@ internal val json = Json {
}
}

internal val fs = FileSystem.SYSTEM

@OptIn(ExperimentalSerializationApi::class)
fun main() {
val config: AgentConfiguration = Properties.decodeFromStringMap(
@@ -50,24 +58,14 @@ fun main() {
logType.set(if (config.debug) LogType.ALL else LogType.WARN)
logDebugCustom("Instantiating save-agent version $SAVE_CLOUD_VERSION with config $config")

platform.posix.chmod(
"save-$SAVE_CORE_VERSION-linuxX64.kexe",
(S_IRUSR or S_IWUSR or S_IXUSR or S_IRGRP or S_IROTH).toUInt()
)
"save-$SAVE_CORE_VERSION-linuxX64.kexe".toPath().markAsExecutable()

signal(SIGTERM, staticCFunction<Int, Unit> {
logInfoCustom("Agent is shutting down because SIGTERM has been received")
exit(1)
})

val httpClient = HttpClient {
install(JsonPlugin) {
serializer = KotlinxSerializer(json)
}
install(HttpTimeout) {
requestTimeoutMillis = config.requestTimeoutMillis
}
}
val httpClient = configureHttpClient(config)

runBlocking {
// Launch in a new scope, because we cancel the scope on graceful termination,
@@ -81,3 +79,28 @@
}
logInfoCustom("Agent is shutting down")
}

@Suppress("FLOAT_IN_ACCURATE_CALCULATIONS", "MagicNumber")
private fun configureHttpClient(agentConfiguration: AgentConfiguration) = HttpClient {
install(ContentNegotiation) {
json(json = json)
}
install(HttpTimeout) {
requestTimeoutMillis = agentConfiguration.requestTimeoutMillis
}
install(HttpRequestRetry) {
retryOnException(maxRetries = agentConfiguration.retry.attempts)
retryOnServerErrors(maxRetries = agentConfiguration.retry.attempts)
exponentialDelay(base = agentConfiguration.retry.initialRetryMillis / 1000.0)
modifyRequest {
if (retryCount > 1) {
val reason = response?.status ?: cause?.describe() ?: "Unknown reason"
logDebugCustom("Retrying request: attempt #$retryCount, reason: $reason")
}
}
}
install(Logging) {
logger = ktorLogger
level = if (agentConfiguration.debug) LogLevel.ALL else LogLevel.INFO
}
}
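
With ContentNegotiation and kotlinx-json installed, request and response bodies go through JSON serialization automatically. A minimal usage sketch of the configured client, where the endpoint path and payload type are placeholders rather than the agent's real API:

import io.ktor.client.HttpClient
import io.ktor.client.call.body
import io.ktor.client.request.post
import io.ktor.client.request.setBody
import io.ktor.http.ContentType
import io.ktor.http.contentType
import kotlinx.serialization.Serializable

@Serializable
data class DemoPayload(val agentId: String, val state: String)

// Placeholder endpoint and payload; shows the shape of a JSON round-trip through the configured client
suspend fun postDemo(httpClient: HttpClient, backendUrl: String): String =
    httpClient.post("$backendUrl/demo") {
        contentType(ContentType.Application.Json)
        setBody(DemoPayload(agentId = "agent-1", state = "IDLE"))
    }.body()
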