
Commit

WIP
keynmol committed Sep 23, 2024
1 parent 0dd3cf3 commit cfe2da5
Showing 5 changed files with 68 additions and 39 deletions.
3 changes: 2 additions & 1 deletion backend/project.scala
@@ -1,5 +1,5 @@
// Main
//> using scala "3.5.0"
//> using scala "3.5.1"

//> using dependency "com.github.vickumar1981:stringdistance_2.13:1.2.7"
//> using dependency "com.indoorvivants::genovese::0.0.2"
@@ -8,6 +8,7 @@
//> using dependency "com.outr::scribe::3.15.0"
//> using dependency "org.scalameta::munit-diff::1.0.1"
//> using dependency "org.scalameta:scalafmt-core_2.13:3.8.3"
//> using file "../shared/protocol.scala"

//> using resourceDirs "../frontend/dist"

44 changes: 23 additions & 21 deletions backend/server.scala
@@ -17,12 +17,6 @@ import scala.util.Try

import scala.concurrent.ExecutionContext.global as GlobalEC

object Limits:
lazy val MaxFileLength =
sys.env.getOrElse("MAX_FILE_LENGTH_BYTES", "2048").toInt
lazy val MaxPopulation = sys.env.getOrElse("MAX_POPULATION", "100").toInt
lazy val MaxGenerations = sys.env.getOrElse("MAX_GENERATIONS", "100").toInt

object Optimizer extends cask.MainRoutes:
override def port = 9999
override def host: String = "0.0.0.0"
@@ -104,7 +98,7 @@ object Optimizer extends cask.MainRoutes:
else if attrs.generations > Limits.MaxGenerations then
error(s"Number of generations above maximum [${Limits.MaxGenerations}]")
else
val (id, job) = createJob(attrs, GlobalEC)
val (id, job) = createJob(attrs)
cask.Response(id.toString(), 200)
end if
end doThing
@@ -127,14 +121,14 @@ object Optimizer extends cask.MainRoutes:
scribe.Logger.root.withMinimumLevel(scribe.Level.Info).replace()

scheduler.scheduleAtFixedRate(
run(cleanupOldJobs),
() => cleanupOldJobs(),
0L,
5L,
TimeUnit.SECONDS
)

scheduler.scheduleAtFixedRate(
run(reportNumberOfJobs),
() => reportNumberOfJobs(),
0L,
1L,
TimeUnit.MINUTES
@@ -164,15 +158,6 @@ case class Job(
result: Option[TrainingResult]
)

case class Result(
config: String,
formattedFile: String,
fileDiff: String,
configDiff: String,
generation: Int,
generations: Int
) derives ReadWriter

inline def absorb[T](msg: String, f: => T) =
try f
catch
@@ -189,11 +174,22 @@ def sendJobUpdate(id: UUID, channel: cask.WsChannelActor, cancel: () => Unit) =
)
cancel()
case Some(job) if job.instruction == TrainingInstruction.Halt =>
absorb(
s"sending JobFinished for [$id]",
channel.send(
cask.Ws.Text(upickle.default.write(JobProgress.Finished))
)
)
absorb(
s"closing WS connection for [$id]",
channel.send(cask.Ws.Close())
)
cancel()
case Some(job) if job.result.isEmpty =>
absorb(
s"sending JobStarted for [$id]",
channel.send(cask.Ws.Text(upickle.default.write(JobProgress.Started)))
)
case Some(job) =>
channel.send(cask.Ws.Ping())
job.result.foreach: result =>
@@ -217,7 +213,7 @@ def sendJobUpdate(id: UUID, channel: cask.WsChannelActor, cancel: () => Unit) =
)
)

val res = Result(
val res = JobProgress.Result(
config = serialised,
formattedFile = result.formattedFile,
fileDiff = diff,
@@ -254,7 +250,7 @@ case class TrainingResult(
generation: Int
)

def createJob(attrs: JobAttributes, ec: ExecutionContext): (UUID, Job) =
def createJob(attrs: JobAttributes): (UUID, Job) =
val id = generateJobId()

val job = Job(
@@ -311,7 +307,7 @@ def createJob(attrs: JobAttributes, ec: ExecutionContext): (UUID, Job) =
end handle
end handler

ec.execute: () =>
GlobalEC.execute: () =>
Train(
featureful = summon[Featureful[ScalafmtConfigSubset]],
config = trainingConfig,
@@ -362,3 +358,9 @@ def cleanupOldJobs()
def reportNumberOfJobs() =
val nJobs = jobs.size()
if nJobs > 0 then scribe.info(s"Number of active jobs: $nJobs")

object Limits:
lazy val MaxFileLength =
sys.env.getOrElse("MAX_FILE_LENGTH_BYTES", "2048").toInt
lazy val MaxPopulation = sys.env.getOrElse("MAX_POPULATION", "100").toInt
lazy val MaxGenerations = sys.env.getOrElse("MAX_GENERATIONS", "100").toInt
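
A note on the scheduling change above: `scheduleAtFixedRate` expects a `java.lang.Runnable`, and in Scala 3 a `() => Unit` lambda is SAM-converted to one, which is why a bare `() => cleanupOldJobs()` can be passed where the diff previously showed `run(cleanupOldJobs)`. A minimal standalone sketch of the same pattern (names and timings here are illustrative, not taken from the repository):

import java.util.concurrent.{Executors, TimeUnit}

@main def schedulerSketch(): Unit =
  val scheduler = Executors.newSingleThreadScheduledExecutor()
  def tick(): Unit = println("tick")
  // the () => Unit lambda is SAM-converted to java.lang.Runnable
  scheduler.scheduleAtFixedRate(() => tick(), 0L, 1L, TimeUnit.SECONDS)
  Thread.sleep(3500) // let a few ticks run, then stop the executor
  scheduler.shutdown()
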
40 changes: 23 additions & 17 deletions frontend/src/main.scala
@@ -22,7 +22,7 @@ case class JobAttributes(

@main def hello =
val id = Var[Option[String]](None)
val result = Var[Option[Result]](None)
val result = Var[Option[JobProgress]](None)

val jobAttributes = Var(Option.empty[JobAttributes])

@@ -38,12 +38,16 @@ case class JobAttributes(
SaveState.readString(SaveState.SEED_KEY).toLongOption.getOrElse(8008L)
)

val fileDiff = UpdatableDiff(result.signal.map(_.map(_.fileDiff)))
val configDiff = UpdatableDiff(result.signal.map(_.map(_.configDiff)))
val fileDiff = UpdatableDiff(result.signal.map(_.collect {
case jr: JobProgress.Result => jr.fileDiff
}))
val configDiff = UpdatableDiff(result.signal.map(_.collect {
case jr: JobProgress.Result => jr.configDiff
}))

val error = Var(Option.empty[String])

val updateJob: Signal[Option[Result]] =
val updateJob =
id.signal.flatMapSwitch:
case None => Signal.fromValue(None)
case Some(id) =>
@@ -52,16 +56,12 @@ case class JobAttributes(

Signal
.fromValue(client.reconnectNow())
.combineWith(client.isConnected)
.flatMapSwitch: isConnected =>
println(isConnected)
if isConnected then
client.received
.tapEach(_ => client.sendOne("ping"))
.map(upickle.default.read[Result](_))
.map(Some(_))
.startWith(None)
else Signal.fromValue(None)
.flatMapSwitch: _ =>
client.received
.tapEach(_ => client.sendOne("ping"))
.map(upickle.default.read[JobProgress](_))
.map(Some(_))
.startWith(None)

val cancelButton =
button(
@@ -78,7 +78,7 @@

val trainingStatus =
child <-- result.signal
.combineWith(id.signal)
.withCurrentValueOf(id.signal)
.map: (res, id) =>
res match
case None =>
@@ -87,10 +87,14 @@ case class JobAttributes(
case Some(value) =>
p("Job submitted, waiting for first update...")

case Some(res) =>
case Some(res: JobProgress.Result) =>
p(
s"Training... generation ${res.generation} out of ${res.generations}"
)
case Some(JobProgress.Started) =>
p("Job started...")
case Some(JobProgress.Finished) =>
p("Job finished")

val errorStatus =
child.maybe <-- error.signal.map(
@@ -240,7 +244,9 @@ case class JobAttributes(
cls := "flex gap-4 w-full",
div(cls := "w-6/12", "File diff:", fileDiff.element),
div(cls := "w-6/12", "Scalafmt configuration:", configDiff.element)
)
),
id.signal.map("id=" + _.toString).debugLog() --> Observer.empty,
result.signal.map("result=" + _.toString()).debugLog() --> Observer.empty
)

renderOnDomContentLoaded(
1 change: 1 addition & 0 deletions frontend/src/project.scala
@@ -7,6 +7,7 @@
//> using dep "com.lihaoyi::fansi::0.5.0"
//> using dep "com.raquo::laminar::17.1.0"
//> using dep io.laminext::websocket::0.17.0
//> using file "../../shared/protocol.scala"

// Test
//> using test.dependency "org.scalameta::munit::1.0.0"
19 changes: 19 additions & 0 deletions shared/protocol.scala
@@ -0,0 +1,19 @@
//> using scala "3.5.0"

//> using dependency "com.lihaoyi::upickle::4.0.1"

import upickle.default.*

enum JobProgress derives ReadWriter:
case Started
case Finished
case Result(
config: String,
formattedFile: String,
fileDiff: String,
configDiff: String,
generation: Int,
generations: Int
)
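
Because both the backend and the frontend now compile this shared file, a quick upickle round-trip is enough to sanity-check the wire format carried over the WebSocket; a minimal sketch (the field values are made up, only the `JobProgress` enum and its `ReadWriter` derivation come from this commit):

import upickle.default.*

@main def protocolRoundTrip(): Unit =
  val progress: JobProgress = JobProgress.Result(
    config = "maxColumn = 100",
    formattedFile = "object Hello",
    fileDiff = "",
    configDiff = "",
    generation = 3,
    generations = 100
  )
  val json = write(progress)         // serialise as the server does before channel.send
  val back = read[JobProgress](json) // decode as the frontend does on each frame
  assert(back == progress)
  println(json)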

