From 017a71681846b38f586967bf3bce2b45ac3f093b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 12 Aug 2021 12:49:52 +0700 Subject: [PATCH 01/42] update com.typesafe:config:1.4.1 for sbt building code --- project/build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.sbt b/project/build.sbt index 50768ce7f..b74b69430 100644 --- a/project/build.sbt +++ b/project/build.sbt @@ -1,3 +1,3 @@ lazy val apiSbt = SbtShared.sbtApiProject dependsOn(apiSbt) -libraryDependencies += "com.typesafe" % "config" % "1.3.1" +libraryDependencies += "com.typesafe" % "config" % "1.4.1" From 5994003c92d1fad46b3bbc55f5b87fde73039310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:06:25 +0700 Subject: [PATCH 02/42] update scala-js 1.7.0 --- project/SbtShared.scala | 2 +- project/project/plugins.sbt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/SbtShared.scala b/project/SbtShared.scala index 08ad2ff75..21faaa882 100644 --- a/project/SbtShared.scala +++ b/project/SbtShared.scala @@ -31,7 +31,7 @@ object SbtShared { } object ScalaJSVersions { - val current = "1.5.1" + val current = "1.7.0" } val distSbtVersion = "1.5.5" diff --git a/project/project/plugins.sbt b/project/project/plugins.sbt index b1639646a..81ca0312e 100644 --- a/project/project/plugins.sbt +++ b/project/project/plugins.sbt @@ -1,3 +1,3 @@ addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.5.1") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.0") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") From ecbf6fc1308bfaff60531a701df4b2543103cd22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:14:47 +0700 Subject: [PATCH 03/42] Update sbt-native-packager 1.9.2 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 25d8a1955..bf038412a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,5 +1,5 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") -addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.21") +addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.0") addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.5.0") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") From 29cd0b5e2b9daa0b3fb7341b77216bb7dbadb575 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 9 Aug 2021 16:42:55 +0700 Subject: [PATCH 04/42] update sbt-assembly 1.0.0 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index bf038412a..b03778282 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,4 +1,4 @@ -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.0") addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.5.0") From 7f83141df04f20c4ee49a5d36375c3c73877ac8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:15:26 +0700 Subject: [PATCH 05/42] update sbt-docker 1.8.2 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index b03778282..83b91dedd 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,7 +1,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.0") -addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.5.0") +addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.8.2") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0") addSbtPlugin("org.scala-js" % "sbt-scalajs" % SbtShared.ScalaJSVersions.current) From 91cc3888b78ea393f39132d9c92f4d6f22c9c783 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:56:43 +0700 Subject: [PATCH 06/42] remove outdated sbt-docker workarounds --- build.sbt | 9 --- project/DockerBuild.scala | 127 -------------------------------------- 2 files changed, 136 deletions(-) delete mode 100644 project/DockerBuild.scala diff --git a/build.sbt b/build.sbt index 2c946af46..98c2d3172 100644 --- a/build.sbt +++ b/build.sbt @@ -116,15 +116,6 @@ lazy val sbtRunner = project tag = Some(gitHashNow) ) ), - docker := { - val log = Keys.streams.value.log - val dockerPath = (docker / DockerKeys.dockerPath).value - val buildOptions = (docker / DockerKeys.buildOptions).value - val stageDir = (docker / target).value - val dockerfile = (docker / DockerKeys.dockerfile).value - val imageNames = (docker / DockerKeys.imageNames).value - sbtdocker.DockerBuildFixed(dockerfile, sbtdocker.staging.DefaultDockerfileProcessor, imageNames, buildOptions, stageDir, dockerPath, log) - }, docker / dockerfile := Def .task { DockerHelper( diff --git a/project/DockerBuild.scala b/project/DockerBuild.scala deleted file mode 100644 index 5c04d7328..000000000 --- a/project/DockerBuild.scala +++ /dev/null @@ -1,127 +0,0 @@ -package sbtdocker - -import sbt._ -import staging.{DockerfileProcessor, StagedDockerfile} - -import scala.sys.process.{Process, ProcessLogger} - -object DockerBuildFixed { - /** - * Build a Dockerfile using a provided docker binary. - * - * @param dockerfile Dockerfile to build - * @param processor processor to create a staging directory for the Dockerfile - * @param imageNames names of the resulting image - * @param stageDir stage dir - * @param dockerPath path to the docker binary - * @param buildOptions options for the build command - * @param log logger - */ - def apply(dockerfile: DockerfileLike, processor: DockerfileProcessor, imageNames: Seq[ImageName], - buildOptions: BuildOptions, stageDir: File, dockerPath: String, log: Logger): ImageId = { - val staged = processor(dockerfile, stageDir) - - apply(staged, imageNames, buildOptions, stageDir, dockerPath, log) - } - - /** - * Build a Dockerfile using a provided docker binary. - * - * @param staged a staged Dockerfile to build. 
- * @param imageNames names of the resulting image - * @param stageDir stage dir - * @param dockerPath path to the docker binary - * @param buildOptions options for the build command - * @param log logger - */ - def apply(staged: StagedDockerfile, imageNames: Seq[ImageName], buildOptions: BuildOptions, stageDir: File, dockerPath: String, log: Logger): ImageId = { - log.debug("Building Dockerfile:\n" + staged.instructionsString) - - log.debug(s"Preparing stage directory '${stageDir.getPath}'") - - clean(stageDir) - createDockerfile(staged, stageDir) - prepareFiles(staged) - buildAndTag(imageNames, stageDir, dockerPath, buildOptions, log) - } - - private[sbtdocker] def clean(stageDir: File) = { - IO.delete(stageDir) - } - - private[sbtdocker] def createDockerfile(staged: StagedDockerfile, stageDir: File) = { - IO.write(stageDir / "Dockerfile", staged.instructionsString) - } - - private[sbtdocker] def prepareFiles(staged: StagedDockerfile) = { - staged.stageFiles.foreach { - case (source, destination) => - source.stage(destination) - } - } - - private val SuccessfullyBuilt = "^Successfully built ([0-9a-f]+)$".r - - private[sbtdocker] def buildAndTag(imageNames: Seq[ImageName], stageDir: File, dockerPath: String, buildOptions: BuildOptions, log: Logger): ImageId = { - val processLogger = ProcessLogger({ line => - log.info(line) - }, { line => - log.info(line) - }) - - val imageId = build(stageDir, dockerPath, buildOptions, log, processLogger) - - imageNames.foreach { name => - DockerTag(imageId, name, dockerPath, log) - } - - imageId - } - - private[sbtdocker] def build(stageDir: File, dockerPath: String, buildOptions: BuildOptions, log: Logger, processLogger: ProcessLogger): ImageId = { - val flags = buildFlags(buildOptions) - val command = dockerPath :: "build" :: flags ::: "." 
:: Nil - log.debug(s"Running command: '${command.mkString(" ")}' in '${stageDir.absString}'") - - val processOutput = Process(command, stageDir).lineStream(processLogger) - processOutput.foreach { line => - log.info(line) - } - - val imageId = processOutput.collect { - case SuccessfullyBuilt(id) => ImageId(id) - }.lastOption - - imageId match { - case Some(id) => - id - case None => - sys.error("Could not parse image id") - } - } - - private[sbtdocker] def buildFlags(buildOptions: BuildOptions): List[String] = { - val cacheFlag = "--no-cache=" + !buildOptions.cache - val removeFlag = { - buildOptions.removeIntermediateContainers match { - case BuildOptions.Remove.Always => - "--force-rm=true" - case BuildOptions.Remove.Never => - "--rm=false" - case BuildOptions.Remove.OnSuccess => - "--rm=true" - } - } - val pullFlag = { - val value = buildOptions.pullBaseImage match { - case BuildOptions.Pull.Always => true - case BuildOptions.Pull.IfMissing => false - } - "--pull=" + value - } - //workaround https://github.com/marcuslonnberg/sbt-docker/issues/74 - val addHosts = List("--add-host", "jenkins.scala-sbt.org:127.0.0.1") - - cacheFlag :: removeFlag :: pullFlag :: addHosts - } -} From eeeb9553fcb614c9abdaaea5937f0fcdcde09de9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:15:53 +0700 Subject: [PATCH 07/42] update sbt-revolver 0.9.1 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 83b91dedd..998f16b97 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.2") -addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.0") +addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.8.2") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0") From 8196b8b5d763b085e95fe04c35931834df1eaf10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 5 Aug 2021 23:22:19 +0700 Subject: [PATCH 08/42] update sbt-buildinfo 0.10.0 --- project/plugins.sbt | 2 +- project/project/plugins.sbt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 998f16b97..a9e8e4917 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,7 +2,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.8.2") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0") addSbtPlugin("org.scala-js" % "sbt-scalajs" % SbtShared.ScalaJSVersions.current) diff --git a/project/project/plugins.sbt b/project/project/plugins.sbt index 81ca0312e..2a75623ae 100644 --- a/project/project/plugins.sbt +++ b/project/project/plugins.sbt @@ -1,3 +1,3 @@ addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0") addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.0") -addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.9.0") +addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") From c2d5d35699c52bb4b244620ffecbd1ca01248ed7 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 5 Aug 2021 22:21:05 +0700 Subject: [PATCH 09/42] update scalatest 3.2.9 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 98c2d3172..58704e479 100644 --- a/build.sbt +++ b/build.sbt @@ -34,7 +34,7 @@ lazy val scastie = project lazy val testSettings = Seq( - libraryDependencies += "org.scalatest" %% "scalatest" % "3.1.0" % Test + libraryDependencies += "org.scalatest" %% "scalatest" % "3.2.9" % Test ) lazy val loggingAndTest = From 00ea7f0fcd79dae7cc19dc01e1b119838f136738 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 12 Aug 2021 13:50:56 +0700 Subject: [PATCH 10/42] update codemirror 5.50.0 -> 5.62.2 --- build.sbt | 2 +- client/yarn.lock | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 58704e479..66770b780 100644 --- a/build.sbt +++ b/build.sbt @@ -216,7 +216,7 @@ lazy val client = project test := {}, Test / loadedTestFrameworks := Map(), Compile / npmDependencies ++= Seq( - "codemirror" -> "5.50.0", + "codemirror" -> "5.62.2", "firacode" -> "1.205.0", "font-awesome" -> "4.7.0", "raven-js" -> "3.11.0", diff --git a/client/yarn.lock b/client/yarn.lock index 3a9df88b7..cd5f6e6e0 100644 --- a/client/yarn.lock +++ b/client/yarn.lock @@ -724,9 +724,10 @@ code-point-at@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" -codemirror@5.50.0: - version "5.50.0" - resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.50.0.tgz#aeacd18f225735b17cbab98908edace87fedcdab" +codemirror@5.62.2: + version "5.62.2" + resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.62.2.tgz#bce6d19c9829e6e788f83886d48ecf5c1e106e65" + integrity sha512-tVFMUa4J3Q8JUd1KL9yQzQB0/BJt7ZYZujZmTPgo/54Lpuq3ez4C8x/ATUY/wv7b7X3AUq8o3Xd+2C5ZrCGWHw== collection-visit@^1.0.0: version "1.0.0" From d1a4b4e07e359cd44d560f8a9d9dbed1c7e81581 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 12 Aug 2021 13:51:30 +0700 Subject: [PATCH 11/42] update scalajs-react 1.7.6 -> 1.7.7 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index 66770b780..aec1f3d20 100644 --- a/build.sbt +++ b/build.sbt @@ -239,7 +239,7 @@ lazy val client = project "webpack-merge" -> "4.1.0", ), libraryDependencies ++= Seq( - "com.github.japgolly.scalajs-react" %%% "extra" % "1.7.6", + "com.github.japgolly.scalajs-react" %%% "extra" % "1.7.7", ) ) .enablePlugins(ScalaJSPlugin, ScalaJSBundlerPlugin) From 6ca8f79c80dbfb3111cac35ffcb0418f060d529c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:00:58 +0700 Subject: [PATCH 12/42] update akka 2.6.15 and akka-http 10.2.5 --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index aec1f3d20..4743d2c78 100644 --- a/build.sbt +++ b/build.sbt @@ -1,9 +1,9 @@ import SbtShared._ import com.typesafe.sbt.SbtNativePackager.Universal -def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % "2.5.26" +def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % "2.6.15" -val akkaHttpVersion = "10.1.11" +val akkaHttpVersion = "10.2.5" addCommandAlias("startAll", "sbtRunner/reStart;server/reStart;client/fastOptJS/startWebpackDevServer") addCommandAlias("startAllProd", 
"sbtRunner/reStart;server/fullOptJS/reStart") From 44a06fc7fb815b3e56d830739e5c68587824302f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Tue, 10 Aug 2021 17:02:28 +0700 Subject: [PATCH 13/42] update play-json 2.6.14 and 2.10.0-RC5 + 2.6.9 -> 2.6.14 for scala 2.10 + 2.9.0 -> 2.10.0-RC5 for scala 2.12, 2.13 --- project/SbtShared.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/project/SbtShared.scala b/project/SbtShared.scala index 21faaa882..45b4cdd29 100644 --- a/project/SbtShared.scala +++ b/project/SbtShared.scala @@ -159,13 +159,11 @@ object SbtShared { libraryDependencies += { scalaVersion.value match { case v if v.startsWith("2.10") => - "com.typesafe.play" %%% "play-json" % "2.6.9" + "com.typesafe.play" %%% "play-json" % "2.6.14" case v if v.startsWith("2.11") => "com.typesafe.play" %%% "play-json" % "2.7.4" - case v if v.startsWith("3.0") => - "com.typesafe.play" %%% "play-json" % "2.10.0-RC5" case _ => - "com.typesafe.play" %%% "play-json" % "2.9.0" + "com.typesafe.play" %%% "play-json" % "2.10.0-RC5" } }, buildInfoKeys := Seq[BuildInfoKey]( From 52f4f0e7171e171ad4364b29c29d00dfc99dac0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 14:01:57 +0700 Subject: [PATCH 14/42] clean code: Fix a scalas 2.13 deprecated warnings warnings about missing `()` when calling/ overriding some methods --- .../LoadBalancerRecoveryTest.scala | 2 +- .../SnippetsContainerTest.scala | 8 ++++---- .../util/GraphStageLogicForwarder.scala | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala b/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala index c6740839e..c88625d1b 100644 --- a/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala +++ b/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala @@ -154,7 +154,7 @@ class LoadBalancerRecoveryTest() } } - override def afterAll: Unit = { + override def afterAll(): Unit = { TestKit.shutdownActorSystem(webSystem) TestKit.shutdownActorSystem(sbtSystem) TestKit.shutdownActorSystem(system) diff --git a/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala b/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala index c83208861..e26f43ac6 100644 --- a/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala +++ b/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala @@ -99,7 +99,7 @@ class SnippetsContainerTest extends AnyFunSuite with BeforeAndAfterAll { test("update") { val container = testContainer - val user = UserLogin("github-user-update" + Random.nextInt) + val user = UserLogin("github-user-update" + Random.nextInt()) val inputs1 = Inputs.default.copy(code = "inputs1").copy(isShowingInUserProfile = true) val snippetId1 = container.save(inputs1, Some(user)).await @@ -122,8 +122,8 @@ class SnippetsContainerTest extends AnyFunSuite with BeforeAndAfterAll { test("listSnippets") { val container = testContainer - val user = UserLogin("github-user-list" + Random.nextInt) - val user2 = UserLogin("github-user-list2" + Random.nextInt) + val user = UserLogin("github-user-list" + Random.nextInt()) + val user2 = UserLogin("github-user-list2" + Random.nextInt()) val inputs1 = Inputs.default.copy(code = "inputs1") container.save(inputs1, Some(user)).await @@ 
-153,7 +153,7 @@ class SnippetsContainerTest extends AnyFunSuite with BeforeAndAfterAll { test("delete") { val container = testContainer - val user = UserLogin("github-user-delete" + Random.nextInt) + val user = UserLogin("github-user-delete" + Random.nextInt()) val inputs1 = Inputs.default.copy(code = "inputs1") val snippetId1 = container.save(inputs1, Some(user)).await diff --git a/utils/src/main/scala/com.olegych.scastie/util/GraphStageLogicForwarder.scala b/utils/src/main/scala/com.olegych.scastie/util/GraphStageLogicForwarder.scala index 3f602211b..801ab77a3 100644 --- a/utils/src/main/scala/com.olegych.scastie/util/GraphStageLogicForwarder.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/GraphStageLogicForwarder.scala @@ -28,7 +28,7 @@ class GraphStageLogicForwarder[T: TypeTag, U: TypeTag](out: Outlet[T], shape: So private def deliver(): Unit = if (isAvailable(out) && buffer.nonEmpty) - push[T](out, buffer.dequeue) + push[T](out, buffer.dequeue()) private def bufferElement(receive: (ActorRef, Any)): Unit = receive match { From 044fa6f379e7fb726e415df365c2eb873dee4908 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 29 Jul 2021 23:26:57 +0700 Subject: [PATCH 15/42] clean code: DispatchActor.connectRunner --- .../DispatchActor.scala | 18 ++++++------------ .../util/ReconnectingActor.scala | 8 ++------ 2 files changed, 8 insertions(+), 18 deletions(-) diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala index ed8991a88..b7891975c 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala @@ -64,12 +64,8 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) private val sbtPorts = (0 until sbtPortsSize).map(sbtPortsStart + _) - private def connectRunner( - runnerName: String, - actorName: String, - host: String - )(port: Int): ((String, Int), ActorSelection) = { - val path = s"akka.tcp://$runnerName@$host:$port/user/$actorName" + private def connectRunner(host: String, port: Int): ((String, Int), ActorSelection) = { + val path = s"akka.tcp://SbtRunner@$host:$port/user/SbtActor" log.info(s"Connecting to ${path}") val selection = context.actorSelection(path) selection ! ActorConnected @@ -77,7 +73,7 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) } private var remoteSbtSelections = - sbtPorts.map(connectRunner("SbtRunner", "SbtActor", host)).toMap + sbtPorts.map(connectRunner(host, _)).toMap private var sbtLoadBalancer: SbtBalancer = { val sbtServers = remoteSbtSelections.to(Vector).map { @@ -131,7 +127,6 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) statusActor ! 
SbtLoadBalancerUpdate(newSbtBalancer) } sbtLoadBalancer = newSbtBalancer - () } //can be called from future @@ -304,9 +299,7 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) if (!remoteSbtSelections.contains((runnerHostname, runnerAkkaPort))) { log.info("Connected Runner {}", runnerAkkaPort) - val sel = connectRunner("SbtRunner", "SbtActor", runnerHostname)( - runnerAkkaPort - ) + val sel = connectRunner(runnerHostname, runnerAkkaPort) val (_, ref) = sel remoteSbtSelections = remoteSbtSelections + sel @@ -328,7 +321,8 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) case run: Run => run0(run.inputsWithIpAndUser, run.snippetId) - case ping: Ping.type => + + case Ping => implicit val timeout: Timeout = Timeout(10.seconds) logError(Future.sequence { sbtLoadBalancer.servers.map { s => diff --git a/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala b/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala index fcc76a811..0b3bce038 100644 --- a/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala @@ -50,14 +50,10 @@ trait ActorReconnecting extends Actor with ActorLogging { println("DisassociatedEvent " + ev) val isServerHostname = - reconnectInfo - .map(info => ev.remoteAddress.host.contains(info.serverHostname)) - .getOrElse(false) + reconnectInfo.exists(info => ev.remoteAddress.host.contains(info.serverHostname)) val isServerAkkaPort = - reconnectInfo - .map(info => ev.remoteAddress.port.contains(info.serverAkkaPort)) - .getOrElse(false) + reconnectInfo.exists(info => ev.remoteAddress.port.contains(info.serverAkkaPort)) if (isServerHostname && isServerAkkaPort && ev.inbound) { log.warning("Disconnected from server") From d66bbb5b9d203026d35604a4889811bd22b6f196 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Sun, 1 Aug 2021 12:12:31 +0700 Subject: [PATCH 16/42] clean code: DispatchActor.remoteSbtSelections Don't create class fields just for initializing other field --- .../com.olegych.scastie.balancer/DispatchActor.scala | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala index b7891975c..943079d28 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala @@ -58,11 +58,6 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) private val config = ConfigFactory.load().getConfig("com.olegych.scastie.balancer") - private val host = config.getString("remote-hostname") - private val sbtPortsStart = config.getInt("remote-sbt-ports-start") - private val sbtPortsSize = config.getInt("remote-sbt-ports-size") - - private val sbtPorts = (0 until sbtPortsSize).map(sbtPortsStart + _) private def connectRunner(host: String, port: Int): ((String, Int), ActorSelection) = { val path = s"akka.tcp://SbtRunner@$host:$port/user/SbtActor" @@ -72,8 +67,13 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) (host, port) -> selection } - private var remoteSbtSelections = + private var remoteSbtSelections = { + val host = config.getString("remote-hostname") + val sbtPortsStart = config.getInt("remote-sbt-ports-start") + val sbtPortsSize = config.getInt("remote-sbt-ports-size") + val sbtPorts = (0 
until sbtPortsSize).map(sbtPortsStart + _) sbtPorts.map(connectRunner(host, _)).toMap + } private var sbtLoadBalancer: SbtBalancer = { val sbtServers = remoteSbtSelections.to(Vector).map { From 8068c728869b6f9993b07ef8457be951621ab890 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 9 Aug 2021 20:36:39 +0700 Subject: [PATCH 17/42] Fix akka-http 10.2 deprecated warnings --- .../main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala | 2 +- .../scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala | 2 +- .../scala/com.olegych.scastie.web/routes/ProgressRoutes.scala | 2 +- .../scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala index e4be1c44f..5c2b8a135 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala @@ -1,7 +1,7 @@ package com.olegych.scastie.web.routes import akka.actor.{ActorRef, ActorSystem} -import akka.http.scaladsl.coding.Gzip +import akka.http.scaladsl.coding.Coders.Gzip import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{Directive1, Route} import com.olegych.scastie.api._ diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala index 3c4ae974b..c373ff267 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala @@ -1,6 +1,6 @@ package com.olegych.scastie.web.routes -import akka.http.scaladsl.coding.{Gzip, NoCoding} +import akka.http.scaladsl.coding.Coders.{Gzip, NoCoding} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import com.olegych.scastie.api.{SnippetId, SnippetUserPart} diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala index 48ef50e70..742dbf75e 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala @@ -2,7 +2,7 @@ package com.olegych.scastie.web.routes import akka.NotUsed import akka.actor.ActorRef -import akka.http.scaladsl.coding.Gzip +import akka.http.scaladsl.coding.Coders.Gzip import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling._ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.sse.ServerSentEvent diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala index 64fec7ae3..b69f5da16 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala @@ -8,7 +8,7 @@ import akka.pattern.ask import akka.actor.{ActorRef, ActorSystem} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import akka.http.scaladsl.coding.Gzip +import akka.http.scaladsl.coding.Coders.Gzip import scala.concurrent.duration.DurationInt From 950de9eabba8b8d61c0b86c5419c59e5f3fd3f10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: 
Tue, 3 Aug 2021 14:43:18 +0700 Subject: [PATCH 18/42] sbtRunner: fix handling of output because the sbt java process does not output a prompt line --- .../src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala index c9fba49f1..036f0b813 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala @@ -272,7 +272,7 @@ class SbtProcess(runTimeout: FiniteDuration, private def setInputs(inputs: Inputs): Unit = { val prompt = - s"""shellPrompt := {_ => println(""); "$promptUniqueId" + "\\n "}""" + s"""shellPrompt := {_ => println("$promptUniqueId"); "> "}""" writeFile(pluginFile, inputs.sbtPluginsConfig + "\n") writeFile(buildFile, prompt + "\n" + inputs.sbtConfig) From df1f9b4f923b2ab7a65bd10e60485bf46de7bfee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 9 Aug 2021 21:14:40 +0700 Subject: [PATCH 19/42] Fix storage/ listSnippets test case When testing on a local machine (not in CI), the order of the actual result list is not always [3,2,1] --- .../com.olegych.scastie.storage/SnippetsContainerTest.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala b/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala index e26f43ac6..d8904ee78 100644 --- a/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala +++ b/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala @@ -8,6 +8,7 @@ import java.util.concurrent.Executors import com.olegych.scastie.api._ import org.scalatest.BeforeAndAfterAll import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.must.Matchers._ import scala.concurrent.duration._ import scala.concurrent.{Await, Future} @@ -146,9 +147,7 @@ class SnippetsContainerTest extends AnyFunSuite with BeforeAndAfterAll { container.create(inputs4, Some(user)).await val snippets = container.listSnippets(user).await - assert( - snippets.map(_.summary) == List("inputs3", "inputs2", "inputs1") - ) + snippets.map(_.summary) must contain theSameElementsAs List("inputs1", "inputs2", "inputs3") } test("delete") { From 72182aabe433bcc3a7c9bf0a5e5de868c7706af3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 9 Aug 2021 20:40:30 +0700 Subject: [PATCH 20/42] Rename sbtRunner's application.conf to reference.conf + server uses reference.conf instead of application.conf, so sbtRunner should use the same naming. + This also permits using the config in this file in balancer/test, which has sbtRunner as a Test dependency.
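A minimal sketch of the config-resolution rule this rename relies on, assuming only Typesafe Config's standard behaviour (the object name RunnerConfigCheck is illustrative; the key is the one referenced in the runner's reference.conf shown in this patch): ConfigFactory.load() layers application.conf on top of every reference.conf found on the classpath, so a module that depends on sbt-runner, such as the balancer tests, picks up the runner's defaults without copying the file.

```scala
import com.typesafe.config.ConfigFactory

// Sketch: reference.conf entries act as library defaults; application.conf (if present)
// overrides them. Any module with sbt-runner on its classpath therefore inherits the
// runner's settings from the renamed reference.conf automatically.
object RunnerConfigCheck extends App {
  val config = ConfigFactory.load()
  // com.olegych.scastie.sbt.hostname is referenced by the runner's config;
  // hasPath avoids throwing if the key is absent in a given environment.
  println(config.hasPath("com.olegych.scastie.sbt.hostname"))
}
```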
--- .../src/main/resources/{application.conf => reference.conf} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename sbt-runner/src/main/resources/{application.conf => reference.conf} (95%) diff --git a/sbt-runner/src/main/resources/application.conf b/sbt-runner/src/main/resources/reference.conf similarity index 95% rename from sbt-runner/src/main/resources/application.conf rename to sbt-runner/src/main/resources/reference.conf index b3841c964..f9a8c5af7 100644 --- a/sbt-runner/src/main/resources/application.conf +++ b/sbt-runner/src/main/resources/reference.conf @@ -21,7 +21,7 @@ akka { warn-about-java-serializer-usage = false } remote { - maximum-payload-bytes = 30000000 bytes + maximum-payload-bytes = 30000000 bytes transport = "akka.remote.netty.NettyRemoteTransport" netty.tcp { hostname = ${com.olegych.scastie.sbt.hostname} From 3f9caa2c004058712f55f9fff9b3f93c0bc4e7fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 9 Aug 2021 21:11:34 +0700 Subject: [PATCH 21/42] Migrate to akka-typed and use cluster instead of remote only # Changes + Also use Jackson for serialization instead of the Java serializer. - Implement in utils/ PlayJackson.scala - Config in utils/ reference.conf - Config removed: warn-about-java-serializer-usage = false + Use Actor discovery [1], so: - Remove ActorReconnecting, SbtPing, SbtPong, SbtRunnerConnect, ActorConnected - server/ balancer no longer need to know sbtRunner's host/port, so we remove some configs such as: balancer.remote-hostname, remote-sbt-ports-start,.. + Type-safe configuration: - Use ConfigLoader & EnrichedConfig from Play Framework. - Add some case classes: BalancerConf (in DispatchActor.scala), SbtConf (in SbtActor.scala) # Config changes: + Add `com.olegych.scastie.data-dir` # Other notes + There is no sender() or parent in akka typed [2], so we need to explicitly add a `replyTo` ActorRef to several messages. + Supervision: when an exception is thrown and no supervision strategy is defined, the actor stops in akka typed instead of restarting as in classic [3], so some actors, such as SbtActor, need to be explicitly supervised.
+ SbtProcess: Migrated from classic FSM to typed [4] # Refs [1] https://doc.akka.io/docs/akka/current/typed/actor-discovery.html [2] https://doc.akka.io/docs/akka/current/typed/from-classic.html#sender [3] https://doc.akka.io/docs/akka/current/typed/from-classic.html#supervision [4] https://doc.akka.io/docs/akka/current/typed/fsm.html --- .../com.olegych.scastie.api/ApiModels.scala | 6 - .../SnippetProgress.scala | 5 +- balancer/src/main/resources/reference.conf | 20 +- .../DispatchActor.scala | 437 +++++++++--------- .../ProgressActor.scala | 97 ++-- .../StatusActor.scala | 93 ++-- .../package.scala | 5 +- balancer/src/test/resources/application.conf | 4 - .../LoadBalancerRecoveryTest.scala | 201 ++++---- build.sbt | 29 +- sbt-runner/src/main/resources/reference.conf | 49 +- .../com.olegych.scastie.sbt/FormatActor.scala | 19 +- .../com.olegych.scastie.sbt/SbtActor.scala | 127 +++-- .../com.olegych.scastie.sbt/SbtMain.scala | 96 ++-- .../com.olegych.scastie.sbt/SbtProcess.scala | 241 ++++++---- .../src/test/resources/application.conf | 7 +- .../SbtActorTest.scala | 21 +- server/src/main/resources/reference.conf | 49 +- .../RestApiServer.scala | 54 +-- .../com.olegych.scastie.web/ServerMain.scala | 158 ++++--- .../oauth2/Github.scala | 42 +- .../oauth2/GithubUserSession.scala | 22 +- .../oauth2/InMemoryRefreshTokenStorage.scala | 47 +- .../routes/ApiRoutes.scala | 8 +- .../routes/DownloadRoutes.scala | 18 +- .../routes/OAuth2Routes.scala | 6 +- .../routes/ProgressRoutes.scala | 10 +- .../routes/ScalaJsRoutes.scala | 26 +- .../routes/ScalaLangRoutes.scala | 13 +- .../routes/StatusRoutes.scala | 13 +- utils/src/main/resources/reference.conf | 27 +- .../util/BlockingProcess.scala | 223 ++++----- .../util/ConfigLoaders.scala | 13 + .../util/PlayJackson.scala | 62 +++ .../util/ProcessActor.scala | 130 +++--- .../util/ReconnectingActor.scala | 65 --- .../com.olegych.scastie/util/SbtTask.scala | 25 +- .../com.olegych.scastie/util/ShowConfig.scala | 43 ++ .../ProcessActorTest.scala | 14 +- 39 files changed, 1360 insertions(+), 1165 deletions(-) delete mode 100644 balancer/src/test/resources/application.conf create mode 100644 utils/src/main/scala/com.olegych.scastie/util/ConfigLoaders.scala create mode 100644 utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala delete mode 100644 utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala create mode 100644 utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala diff --git a/api/src/main/scala/com.olegych.scastie.api/ApiModels.scala b/api/src/main/scala/com.olegych.scastie.api/ApiModels.scala index 318a28f93..db0e72d75 100644 --- a/api/src/main/scala/com.olegych.scastie.api/ApiModels.scala +++ b/api/src/main/scala/com.olegych.scastie.api/ApiModels.scala @@ -2,12 +2,6 @@ package com.olegych.scastie.api import play.api.libs.json._ -case object SbtPing -case object SbtPong - -case class SbtRunnerConnect(hostname: String, port: Int) -case object ActorConnected - object SnippetSummary { implicit val formatSnippetSummary: OFormat[SnippetSummary] = Json.format[SnippetSummary] diff --git a/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala b/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala index 9db7b39b3..7408d61d8 100644 --- a/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala +++ b/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala @@ -24,6 +24,9 @@ object SnippetProgress { implicit val formatSnippetProgress: OFormat[SnippetProgress] = Json.format[SnippetProgress] } 
+// note: ProgressActor.Message alias to this +trait ProgressMessage + case class SnippetProgress( ts: Option[Long], id: Option[Long], @@ -39,7 +42,7 @@ case class SnippetProgress( isTimeout: Boolean, isSbtError: Boolean, isForcedProgramMode: Boolean -) { +) extends ProgressMessage { def isFailure: Boolean = isTimeout || isSbtError || runtimeError.nonEmpty || compilationInfos.exists(_.severity == Error) override def toString: String = Json.toJsObject(this).toString() diff --git a/balancer/src/main/resources/reference.conf b/balancer/src/main/resources/reference.conf index fb65ab1a2..c296b9375 100644 --- a/balancer/src/main/resources/reference.conf +++ b/balancer/src/main/resources/reference.conf @@ -1,11 +1,17 @@ +com.olegych.scastie { + # default parent dir of: + # + balancer.{snippets-dir, old-snippets-dir} - if use files snippets-container + # + web.oauth2.{users-file, sessions-file} + data-dir = ./target + data-dir = ${?DATA_DIR} +} com.olegych.scastie.balancer { snippets-container = files - snippets-dir = ./target/snippets/ - old-snippets-dir = ./target/old-snippets/ - - remote-hostname = "127.0.0.1" - remote-sbt-ports-start = 5150 - remote-sbt-ports-size = 1 + snippets-dir = ${com.olegych.scastie.data-dir}/snippets/ + old-snippets-dir = ${com.olegych.scastie.data-dir}/old-snippets/ } -akka.actor.warn-about-java-serializer-usage = false +akka { + actor.provider = cluster + cluster.downing-provider-class = "akka.cluster.sbr.SplitBrainResolverProvider" +} diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala index 943079d28..8ced5a125 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala @@ -1,117 +1,114 @@ package com.olegych.scastie.balancer -import java.nio.file.Paths +import java.nio.file.Path import java.time.Instant import java.util.concurrent.Executors - -import akka.actor.{Actor, ActorLogging, ActorRef, ActorSelection, OneForOneStrategy, SupervisorStrategy} -import akka.event -import akka.pattern.ask -import akka.remote.DisassociatedEvent -import akka.util.Timeout +import akka.NotUsed +import akka.actor.typed._ +import akka.actor.typed.scaladsl._ +import akka.actor.typed.receptionist.Receptionist import com.olegych.scastie.api import com.olegych.scastie.api._ import com.olegych.scastie.storage._ import com.olegych.scastie.util._ -import com.typesafe.config.ConfigFactory +import ConfigLoaders._ +import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} +import org.slf4j.LoggerFactory +import org.slf4j.event.Level import scala.concurrent._ import scala.concurrent.duration._ - -case class Address(host: String, port: Int) -case class SbtConfig(config: String) +import scala.util.{Failure, Try} +import DispatchActor._ case class UserTrace(ip: String, user: Option[User]) case class InputsWithIpAndUser(inputs: Inputs, user: UserTrace) -case class RunSnippet(inputs: InputsWithIpAndUser) -case class SaveSnippet(inputs: InputsWithIpAndUser) -case class UpdateSnippet(snippetId: SnippetId, inputs: InputsWithIpAndUser) -case class DeleteSnippet(snippetId: SnippetId) -case class DownloadSnippet(snippetId: SnippetId) +case class RunSnippet(replyTo: ActorRef[SnippetId], inputs: InputsWithIpAndUser) extends Message +case class SaveSnippet(replyTo: ActorRef[SnippetId], inputs: InputsWithIpAndUser) extends Message +case class UpdateSnippet(replyTo: ActorRef[Option[SnippetId]], 
snippetId: SnippetId, inputs: InputsWithIpAndUser) extends Message +case class DeleteSnippet(replyTo: ActorRef[Boolean], snippetId: SnippetId) extends Message +case class DownloadSnippet(replyTo: ActorRef[Option[Path]], snippetId: SnippetId) extends Message -case class ForkSnippet(snippetId: SnippetId, inputs: InputsWithIpAndUser) +case class ForkSnippet(replyTo: ActorRef[Option[SnippetId]], snippetId: SnippetId, inputs: InputsWithIpAndUser) extends Message -case class FetchSnippet(snippetId: SnippetId) -case class FetchOldSnippet(id: Int) -case class FetchUserSnippets(user: User) +case class FetchSnippet(replyTo: ActorRef[Option[FetchResult]], snippetId: SnippetId) extends Message +case class FetchOldSnippet(replyTo: ActorRef[Option[FetchResult]], id: Int) extends Message +case class FetchUserSnippets(replyTo: ActorRef[List[SnippetSummary]], user: User) extends Message -case class ReceiveStatus(requester: ActorRef) +case class ReceiveStatus(replyTo: ActorRef[LoadBalancerInfo], requester: ActorRef[StatusProgress]) extends Message -case class Run(inputsWithIpAndUser: InputsWithIpAndUser, snippetId: SnippetId) +object DispatchActor { + type Message = BalancerMessage -case class Done(progress: api.SnippetProgress, retries: Int) + private case class Run(inputsWithIpAndUser: InputsWithIpAndUser, snippetId: SnippetId) extends Message -case object Ping + private case class Done(progress: api.SnippetProgress, retries: Int) extends Message -class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) -// extends PersistentActor with AtLeastOnceDelivery - extends Actor - with ActorLogging { + private case class ListingResponse(listing: Receptionist.Listing) extends Message - override def supervisorStrategy: SupervisorStrategy = OneForOneStrategy() { - case e => - log.error(e, "failure") - SupervisorStrategy.resume + object Adapter { + case class FetchScalaJs(replyTo: ActorRef[Option[FetchResultScalaJs]], snippetId: SnippetId) extends Message + case class FetchScalaSource(replyTo: ActorRef[Option[FetchResultScalaSource]], snippetId: SnippetId) extends Message + case class FetchScalaJsSourceMap(replyTo: ActorRef[Option[FetchResultScalaJsSourceMap]], snippetId: SnippetId) extends Message } - private val config = - ConfigFactory.load().getConfig("com.olegych.scastie.balancer") + def apply( + progressActor: ActorRef[ProgressActor.Message], + statusActor: ActorRef[StatusActor.Message], + config: BalancerConf + ): Behavior[Message] = + Behaviors.supervise[Message] { + Behaviors.setup { ctx => + Behaviors.withTimers { timers => + Behaviors.logMessages( + LogOptions().withLevel(Level.INFO), + new DispatchActor(progressActor, statusActor, config)(ctx, timers) + ) + } + } + }.onFailure(SupervisorStrategy.resume) +} - private def connectRunner(host: String, port: Int): ((String, Int), ActorSelection) = { - val path = s"akka.tcp://SbtRunner@$host:$port/user/SbtActor" - log.info(s"Connecting to ${path}") - val selection = context.actorSelection(path) - selection ! 
ActorConnected - (host, port) -> selection - } +class DispatchActor( + progressActor: ActorRef[ProgressActor.Message], + statusActor: ActorRef[StatusActor.Message], + config: BalancerConf +)(ctx: ActorContext[Message], + timers: TimerScheduler[Message] +) extends AbstractBehavior(ctx) { + import context.{system, messageAdapter, self, executionContext, log} - private var remoteSbtSelections = { - val host = config.getString("remote-hostname") - val sbtPortsStart = config.getInt("remote-sbt-ports-start") - val sbtPortsSize = config.getInt("remote-sbt-ports-size") - val sbtPorts = (0 until sbtPortsSize).map(sbtPortsStart + _) - sbtPorts.map(connectRunner(host, _)).toMap - } + // context.log is not thread safe + // https://doc.akka.io/docs/akka/current/typed/logging.html#how-to-log + private val safeLog = LoggerFactory.getLogger(classOf[DispatchActor]) - private var sbtLoadBalancer: SbtBalancer = { - val sbtServers = remoteSbtSelections.to(Vector).map { - case (_, ref) => - val state: SbtState = SbtState.Unknown - Server(ref, Inputs.default, state) - } + system.receptionist ! Receptionist.Subscribe( + Services.SbtRunner, + messageAdapter[Receptionist.Listing](ListingResponse) + ) - LoadBalancer(servers = sbtServers) - } + private var remoteRunners = Set.empty[ActorRef[SbtMessage]] - import context._ + private var sbtLoadBalancer: SbtBalancer = LoadBalancer(Vector.empty) - system.scheduler.schedule(0.seconds, 30.seconds) { - self ! Ping - } + statusActor ! SetDispatcher(self) - override def preStart(): Unit = { - statusActor ! SetDispatcher(self) - context.system.eventStream.subscribe(self, classOf[DisassociatedEvent]) - super.preStart() + override def onSignal: PartialFunction[Signal, Behavior[Message]] = { + case PostStop => + container.close() + Behaviors.unhandled } - override def postStop(): Unit = { - super.postStop() - container.close() - } - - val containerType = config.getString("snippets-container") - private val container = - containerType match { - case "memory" => new InMemorySnippetsContainer - case "mongo" => new MongoDBSnippetsContainer(ExecutionContext.fromExecutor(Executors.newWorkStealingPool())) - case "files" => + config.snippetsContainer match { + case SnippetsType.Memory => new InMemorySnippetsContainer + case SnippetsType.Mongo => new MongoDBSnippetsContainer(ExecutionContext.fromExecutor(Executors.newWorkStealingPool())) + case f: SnippetsType.Files => new FilesSnippetsContainer( - Paths.get(config.getString("snippets-dir")), - Paths.get(config.getString("old-snippets-dir")) + f.snippetsDir, + f.oldSnippetsDir )( ExecutionContext.fromExecutorService( Executors.newCachedThreadPool() @@ -146,194 +143,174 @@ class DispatchActor(progressActor: ActorRef, statusActor: ActorRef) case Some((server, newBalancer)) => updateSbtBalancer(newBalancer) - server.ref.tell( - SbtTask(snippetId, inputs, ip, user.map(_.login), progressActor), - self - ) + server.ref ! 
SbtTask(snippetId, inputs, ip, user.map(_.login), progressActor, self) case _ => () } } - private def logError[T](f: Future[T]) = { - f.recover { - case e => log.error(e, "failed future") - } + private val logError: PartialFunction[Try[_], _] = { + case Failure(e) => safeLog.error("failed future", e) } - def receive: Receive = event.LoggingReceive(event.Logging.InfoLevel) { - case SbtPong => () - - case format: FormatRequest => - val server = sbtLoadBalancer.getRandomServer - server.foreach(_.ref.tell(format, sender())) - () - - case x @ RunSnippet(inputsWithIpAndUser) => - log.info(s"starting ${x}") - val InputsWithIpAndUser(inputs, UserTrace(_, user)) = inputsWithIpAndUser - val sender = this.sender() - logError(container.create(inputs, user.map(u => UserLogin(u.login))).map { snippetId => - sender ! snippetId - run(inputsWithIpAndUser, snippetId) - log.info(s"finished ${x}") - }) - - case SaveSnippet(inputsWithIpAndUser) => - val InputsWithIpAndUser(inputs, UserTrace(_, user)) = inputsWithIpAndUser - val sender = this.sender() - logError(container.save(inputs, user.map(u => UserLogin(u.login))).map { snippetId => - sender ! snippetId - run(inputsWithIpAndUser, snippetId) - }) - - case UpdateSnippet(snippetId, inputsWithIpAndUser) => - val sender = this.sender() - logError(container.update(snippetId, inputsWithIpAndUser.inputs).map { updatedSnippetId => - sender ! updatedSnippetId - updatedSnippetId.foreach( - snippetIdU => run(inputsWithIpAndUser, snippetIdU) - ) - }) + override def onMessage(msg: Message): Behavior[Message] = { + msg match { + case format: FormatReq => + val server = sbtLoadBalancer.getRandomServer + server.foreach(_.ref ! format) + + case RunSnippet(sender, inputsWithIpAndUser) => + val InputsWithIpAndUser(inputs, UserTrace(_, user)) = inputsWithIpAndUser + container.create(inputs, user.map(u => UserLogin(u.login))).map { snippetId => + sender ! snippetId + run(inputsWithIpAndUser, snippetId) + }.andThen(logError) + + case SaveSnippet(sender, inputsWithIpAndUser) => + val InputsWithIpAndUser(inputs, UserTrace(_, user)) = inputsWithIpAndUser + container.save(inputs, user.map(u => UserLogin(u.login))).map { snippetId => + sender ! snippetId + run(inputsWithIpAndUser, snippetId) + }.andThen(logError) + + case UpdateSnippet(sender, snippetId, inputsWithIpAndUser) => + container.update(snippetId, inputsWithIpAndUser.inputs).map { updatedSnippetId => + sender ! updatedSnippetId + updatedSnippetId.foreach( + snippetIdU => run(inputsWithIpAndUser, snippetIdU) + ) + }.andThen(logError) - case ForkSnippet(snippetId, inputsWithIpAndUser) => - val InputsWithIpAndUser(inputs, UserTrace(_, user)) = - inputsWithIpAndUser - val sender = this.sender() - logError( + case ForkSnippet(sender, snippetId, inputsWithIpAndUser) => + val InputsWithIpAndUser(inputs, UserTrace(_, user)) = + inputsWithIpAndUser container .fork(snippetId, inputs, user.map(u => UserLogin(u.login))) .map { forkedSnippetId => sender ! Some(forkedSnippetId) run(inputsWithIpAndUser, forkedSnippetId) - } - ) + }.andThen(logError) - case DeleteSnippet(snippetId) => - val sender = this.sender() - logError(container.deleteAll(snippetId).map(_ => sender ! (()))) + case DeleteSnippet(sender, snippetId) => + container.deleteAll(snippetId).map(_ => sender ! true).andThen(logError) - case DownloadSnippet(snippetId) => - val sender = this.sender() - logError(container.downloadSnippet(snippetId).map(sender ! _)) + case DownloadSnippet(sender, snippetId) => + container.downloadSnippet(snippetId).map(sender ! 
_).andThen(logError) - case FetchSnippet(snippetId) => - val sender = this.sender() - logError(container.readSnippet(snippetId).map(sender ! _)) + case FetchSnippet(sender, snippetId) => + container.readSnippet(snippetId).map(sender ! _).andThen(logError) - case FetchOldSnippet(id) => - val sender = this.sender() - logError(container.readOldSnippet(id).map(sender ! _)) + case FetchOldSnippet(sender, id) => + container.readOldSnippet(id).map(sender ! _).andThen(logError) - case FetchUserSnippets(user) => - val sender = this.sender() - logError(container.listSnippets(UserLogin(user.login)).map(sender ! _)) + case FetchUserSnippets(sender, user) => + container.listSnippets(UserLogin(user.login)).map(sender ! _).andThen(logError) - case FetchScalaJs(snippetId) => - val sender = this.sender() - logError(container.readScalaJs(snippetId).map(sender ! _)) + case Adapter.FetchScalaJs(sender, snippetId) => + container.readScalaJs(snippetId).map(sender ! _).andThen(logError) - case FetchScalaSource(snippetId) => - val sender = this.sender() - logError(container.readScalaSource(snippetId).map(sender ! _)) + case Adapter.FetchScalaSource(sender, snippetId) => + container.readScalaSource(snippetId).map(sender ! _).andThen(logError) - case FetchScalaJsSourceMap(snippetId) => - val sender = this.sender() - logError(container.readScalaJsSourceMap(snippetId).map(sender ! _)) + case Adapter.FetchScalaJsSourceMap(sender, snippetId) => + container.readScalaJsSourceMap(snippetId).map(sender ! _).andThen(logError) - case progress: api.SnippetProgress => - val sender = this.sender() - if (progress.isDone) { - self ! Done(progress, retries = 100) - } - logError( + case SnippetProgressAsk(sender, progress) => + if (progress.isDone) { + self ! Done(progress, retries = 100) + } container .appendOutput(progress) .recover { - case e => - log.error(e, s"failed to save $progress from $sender") - e + case e => log.error(s"failed to save $progress from $sender", e) } - .map(sender ! _) - ) - - case done: Done => - done.progress.snippetId.foreach { sid => - val newBalancer = sbtLoadBalancer.done(TaskId(sid)) - newBalancer match { - case Some(newBalancer) => - updateSbtBalancer(newBalancer) - case None => - if (done.retries >= 0) { - system.scheduler.scheduleOnce(1.second) { - self ! done.copy(retries = done.retries - 1) + .map(_ => sender ! 
NotUsed) + .andThen(logError) + + case done: Done => + done.progress.snippetId.foreach { sid => + val newBalancer = sbtLoadBalancer.done(TaskId(sid)) + newBalancer match { + case Some(newBalancer) => + updateSbtBalancer(newBalancer) + case None => + if (done.retries >= 0) { + timers.startSingleTimer( + done.copy(retries = done.retries - 1), + 1.second + ) + } else { + val taskIds = + sbtLoadBalancer.servers.flatMap(_.mailbox.map(_.taskId)) + log.error(s"stopped retrying to update ${taskIds} with ${done}") } - } else { - val taskIds = - sbtLoadBalancer.servers.flatMap(_.mailbox.map(_.taskId)) - log.error(s"stopped retrying to update ${taskIds} with ${done}") - } + } } - } - case event: DisassociatedEvent => - for { - host <- event.remoteAddress.host - port <- event.remoteAddress.port - ref <- remoteSbtSelections.get((host, port)) - } { - log.warning("removing disconnected: {}", ref) - val previousRemoteSbtSelections = remoteSbtSelections - remoteSbtSelections = remoteSbtSelections - ((host, port)) - if (previousRemoteSbtSelections != remoteSbtSelections) { - updateSbtBalancer(sbtLoadBalancer.removeServer(ref)) + case ListingResponse(Services.SbtRunner.Listing(listings)) => + val added = listings diff remoteRunners + val removed = remoteRunners diff listings + if (added.nonEmpty) { + log.info("Runners added {}", added) } - } + if (removed.nonEmpty){ + log.warn("Runners removed {}", removed) + } + if (added.nonEmpty || removed.nonEmpty) { + remoteRunners = listings + val newBalancer = LoadBalancer( + sbtLoadBalancer.servers.filterNot(s => removed.contains(s.ref)) ++ + added.map(Server(_, Inputs.default, SbtState.Unknown: SbtState)) + ) - case SbtUp => - log.info("SbtUp") + updateSbtBalancer(newBalancer) + } + // only for testing + if (!serviceRegistered && remoteRunners.nonEmpty) { + ctx.system.receptionist ! Receptionist.Register(Services.Balancer, ctx.self) + serviceRegistered = true + } - case Replay(SbtRun(snippetId, inputs, progressActor, snippetActor)) => - log.info("Replay: " + inputs.code) + case ReceiveStatus(replyTo, requester) => + replyTo ! LoadBalancerInfo(sbtLoadBalancer, requester) - case SbtRunnerConnect(runnerHostname, runnerAkkaPort) => - if (!remoteSbtSelections.contains((runnerHostname, runnerAkkaPort))) { - log.info("Connected Runner {}", runnerAkkaPort) + case run: Run => + run0(run.inputsWithIpAndUser, run.snippetId) + } + + this + } - val sel = connectRunner(runnerHostname, runnerAkkaPort) - val (_, ref) = sel + private[this] var serviceRegistered = false +} - remoteSbtSelections = remoteSbtSelections + sel +case class BalancerConf( + snippetsContainer: SnippetsType, +) +object BalancerConf { + import SnippetsType._ + implicit val loader: ConfigLoader[BalancerConf] = (c: EnrichedConfig) => BalancerConf( + c.get[String]("snippets-container") match { + case "memory" => Memory + case "mongo" => Mongo + case "files" => Files( + c.get[Path]("snippets-dir"), + c.get[Path]("old-snippets-dir"), + ) + case _ => + println("fallback to in-memory container") + Memory + } + ) +} - val state: SbtState = SbtState.Unknown +sealed trait SnippetsType +object SnippetsType { + case object Memory extends SnippetsType - updateSbtBalancer( - sbtLoadBalancer.addServer( - Server(ref, Inputs.default, state) - ) - ) - } + case object Mongo extends SnippetsType - case ReceiveStatus(requester) => - sender() ! LoadBalancerInfo(sbtLoadBalancer, requester) - - case statusProgress: StatusProgress => - statusActor ! 
statusProgress - - case run: Run => - run0(run.inputsWithIpAndUser, run.snippetId) - - case Ping => - implicit val timeout: Timeout = Timeout(10.seconds) - logError(Future.sequence { - sbtLoadBalancer.servers.map { s => - (s.ref ? SbtPing) - .map { _ => - log.info(s"pinged ${s.ref} server") - } - .recover { - case e => log.error(e, s"couldn't ping ${s} server") - } - } - }) - } + case class Files( + snippetsDir: Path, + oldSnippetsDir: Path, + ) extends SnippetsType } diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/ProgressActor.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/ProgressActor.scala index bde039147..d5302e92b 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/ProgressActor.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/ProgressActor.scala @@ -2,59 +2,102 @@ package com.olegych.scastie package balancer import akka.NotUsed -import akka.actor.{Actor, ActorRef} +import akka.{actor => classic} +import akka.actor.typed.{ActorRef, Behavior, SupervisorStrategy} +import akka.actor.typed.scaladsl.{AbstractBehavior, ActorContext, Behaviors} +import akka.actor.typed.scaladsl.adapter._ import akka.stream.scaladsl.Source import com.olegych.scastie.api._ import com.olegych.scastie.util.GraphStageForwarder import scala.collection.mutable.{Map => MMap, Queue => MQueue} import scala.concurrent.duration.DurationLong +import ProgressActor.{Message, ProgressSource} -case class SubscribeProgress(snippetId: SnippetId) -private case class Cleanup(snippetId: SnippetId) +case class SubscribeProgress(snippetId: SnippetId, replyTo: ActorRef[ProgressSource]) extends Message -class ProgressActor extends Actor { +object ProgressActor { type ProgressSource = Source[SnippetProgress, NotUsed] - private val subscribers = MMap.empty[SnippetId, (ProgressSource, Option[ActorRef])] + type Message = ProgressMessage + private case class Cleanup(snippetId: SnippetId) extends Message + + private case class GraphStageForwarderMsg( + snippedId: SnippetId, + graphStageForwarderActor: ActorRef[SnippetProgress] + ) extends Message + + def apply(): Behavior[Message] = + Behaviors.supervise { + Behaviors.setup(new ProgressActor(_)) + }.onFailure(SupervisorStrategy.resume) +} + +import ProgressActor._ +class ProgressActor private (ctx: ActorContext[Message]) extends AbstractBehavior[Message](ctx) { + import context.{self, executionContext} + + private val subscribers = MMap.empty[SnippetId, (ProgressSource, Option[ActorRef[SnippetProgress]])] private val queuedMessages = MMap.empty[SnippetId, MQueue[SnippetProgress]] - override def receive: Receive = { - case SubscribeProgress(snippetId) => - val (source, _) = getOrCreateNewSubscriberInfo(snippetId, self) - sender() ! source + override def onMessage(msg: Message): Behavior[Message] = { + msg match { + case SubscribeProgress(snippetId, replyTo) => + replyTo ! 
getOrCreateNewSubscriberInfo(snippetId) - case snippetProgress: SnippetProgress => - snippetProgress.snippetId.foreach { snippetId => - getOrCreateNewSubscriberInfo(snippetId, self) - queuedMessages.getOrElseUpdate(snippetId, MQueue()).enqueue(snippetProgress) - sendQueuedMessages(snippetId, self) - } + case snippetProgress: SnippetProgress => + snippetProgress.snippetId.foreach { snippetId => + getOrCreateNewSubscriberInfo(snippetId) + queuedMessages.getOrElseUpdate(snippetId, MQueue()).enqueue(snippetProgress) + sendQueuedMessages(snippetId) + } + + case GraphStageForwarderMsg(snippedId, graphStageForwarderActor) => + subscribers.get(snippedId).foreach { s => + subscribers.update( + snippedId, + s.copy(_2 = Some(graphStageForwarderActor)) + ) + } + sendQueuedMessages(snippedId) - case (snippedId: SnippetId, graphStageForwarderActor: ActorRef) => - subscribers.get(snippedId).foreach(s => subscribers.update(snippedId, s.copy(_2 = Some(graphStageForwarderActor)))) - sendQueuedMessages(snippedId, self) + case Cleanup(snippetId) => + subscribers.remove(snippetId) + queuedMessages.remove(snippetId) + } - case Cleanup(snippetId) => - subscribers.remove(snippetId) - queuedMessages.remove(snippetId) + this } - private def getOrCreateNewSubscriberInfo(snippetId: SnippetId, self: ActorRef): (ProgressSource, Option[ActorRef]) = { + private def getOrCreateNewSubscriberInfo(snippetId: SnippetId): ProgressSource = subscribers.getOrElseUpdate( snippetId, - Source.fromGraph(new GraphStageForwarder("outlet-graph-" + snippetId, self, snippetId)) -> None - ) - } + Source.fromGraph( + new GraphStageForwarder( + "outlet-graph-" + snippetId, + //send from com.olegych.scastie.util.GraphStageLogicForwarder.preStart + context.messageAdapter[(SnippetId, classic.ActorRef)] { + case (snippedId, graphStageForwarderActor) => + GraphStageForwarderMsg(snippedId, graphStageForwarderActor.toTyped[SnippetProgress]) + }.toClassic, + snippetId + ) + ) -> None + )._1 - private def sendQueuedMessages(snippetId: SnippetId, self: ActorRef): Unit = + private def sendQueuedMessages(snippetId: SnippetId): Unit = for { messageQueue <- queuedMessages.get(snippetId).toSeq (_, Some(graphStageForwarderActor)) <- subscribers.get(snippetId).toSeq message <- messageQueue.dequeueAll(_ => true) } yield { graphStageForwarderActor ! message - if (message.isDone) context.system.scheduler.scheduleOnce(3.seconds, self, Cleanup(snippetId))(context.dispatcher) + if (message.isDone) { + context.system.scheduler.scheduleOnce( + 3.seconds, + () => { self ! 
Cleanup(snippetId) } + ) + } } } diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/StatusActor.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/StatusActor.scala index 23b39dbdf..935b0168f 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/StatusActor.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/StatusActor.scala @@ -2,7 +2,12 @@ package com.olegych.scastie.balancer import com.olegych.scastie.api._ -import akka.actor.{Actor, ActorLogging, ActorRef, Props} +import StatusProgress.KeepAlive +import akka.{actor => classic} +import akka.NotUsed +import akka.actor.typed.{ActorRef, Behavior, SupervisorStrategy} +import akka.actor.typed.scaladsl.{AbstractBehavior, ActorContext, Behaviors} +import akka.actor.typed.scaladsl.adapter._ import akka.stream.scaladsl.Source import java.util.concurrent.TimeUnit @@ -10,55 +15,77 @@ import scala.collection.mutable import scala.concurrent.duration._ import com.olegych.scastie.util.GraphStageForwarder +import StatusActor._ -case object SubscribeStatus +case class SubscribeStatus(replyTo: ActorRef[Source[KeepAlive.type, NotUsed]]) extends Message -case class SbtLoadBalancerUpdate(newSbtBalancer: SbtBalancer) -case class LoadBalancerInfo(sbtBalancer: SbtBalancer, requester: ActorRef) +case class SbtLoadBalancerUpdate(newSbtBalancer: SbtBalancer) extends Message +case class LoadBalancerInfo(sbtBalancer: SbtBalancer, requester: ActorRef[StatusProgress]) extends Message -case class SetDispatcher(dispatchActor: ActorRef) +case class SetDispatcher(dispatchActor: ActorRef[ReceiveStatus]) extends Message object StatusActor { - def props: Props = Props(new StatusActor) + sealed trait Message + + private case class GraphStageForwarderMsg( + graphStageForwarderActor: ActorRef[StatusProgress] + ) extends Message + + def apply(): Behavior[Message] = + Behaviors.supervise { + Behaviors.setup(new StatusActor(_)) + }.onFailure(SupervisorStrategy.resume) } -class StatusActor private () extends Actor with ActorLogging { - private var publishers = mutable.Buffer.empty[ActorRef] - private var dispatchActor: Option[ActorRef] = None +class StatusActor private (ctx: ActorContext[Message]) extends AbstractBehavior[Message](ctx) { + import context.self - override def receive: Receive = { - case SubscribeStatus => { + private val publishers = mutable.Buffer.empty[ActorRef[StatusProgress]] - val publisherGraphStage = - new GraphStageForwarder("StatusActor-GraphStageForwarder", self, None) + private var dispatchActor: Option[ActorRef[ReceiveStatus]] = None - val source = - Source - .fromGraph(publisherGraphStage) - .keepAlive( - FiniteDuration(1, TimeUnit.SECONDS), - () => StatusProgress.KeepAlive + override def onMessage(msg: Message): Behavior[Message] = { + msg match { + case SubscribeStatus(replyTo) => + val publisherGraphStage = + new GraphStageForwarder( + "StatusActor-GraphStageForwarder", + //send from com.olegych.scastie.util.GraphStageLogicForwarder.preStart + context.messageAdapter[(None.type, classic.ActorRef)] { + case (_, publisher) => GraphStageForwarderMsg(publisher.toTyped[StatusProgress]) + }.toClassic, + None ) - sender() ! source - } + val source = + Source + .fromGraph(publisherGraphStage) + .keepAlive( + FiniteDuration(1, TimeUnit.SECONDS), + () => StatusProgress.KeepAlive + ) - case (None, publisher: ActorRef) => { - publishers += publisher - dispatchActor.foreach(_ ! ReceiveStatus(publisher)) - } + replyTo ! source - case SbtLoadBalancerUpdate(newSbtBalancer) => { - publishers.foreach(_ ! 
convertSbt(newSbtBalancer)) - } + case GraphStageForwarderMsg(publisher) => { + publishers += publisher + dispatchActor.foreach(_ ! ReceiveStatus(self, publisher)) + } - case LoadBalancerInfo(sbtBalancer, requester) => { - requester ! convertSbt(sbtBalancer) - } + case SbtLoadBalancerUpdate(newSbtBalancer) => { + publishers.foreach(_ ! convertSbt(newSbtBalancer)) + } - case SetDispatcher(dispatchActorReference) => { - dispatchActor = Some(dispatchActorReference) + case LoadBalancerInfo(sbtBalancer, requester) => { + requester ! convertSbt(sbtBalancer) + } + + case SetDispatcher(dispatchActorReference) => { + dispatchActor = Some(dispatchActorReference) + } } + + this } private def convertSbt(newSbtBalancer: SbtBalancer): StatusProgress = { diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/package.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/package.scala index 974a7cd1e..738a922ee 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/package.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/package.scala @@ -1,8 +1,9 @@ package com.olegych.scastie -import akka.actor.ActorSelection +import akka.actor.typed.ActorRef import com.olegych.scastie.api.SbtState +import com.olegych.scastie.util.SbtMessage package object balancer { - type SbtBalancer = LoadBalancer[ActorSelection, SbtState] + type SbtBalancer = LoadBalancer[ActorRef[SbtMessage], SbtState] } diff --git a/balancer/src/test/resources/application.conf b/balancer/src/test/resources/application.conf deleted file mode 100644 index c71f51805..000000000 --- a/balancer/src/test/resources/application.conf +++ /dev/null @@ -1,4 +0,0 @@ -com.olegych.scastie.balancer { - snippets-container = files -} -akka.actor.provider = akka.actor.LocalActorRefProvider \ No newline at end of file diff --git a/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala b/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala index c88625d1b..714bc79c0 100644 --- a/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala +++ b/balancer/src/test/scala/com.olegych.scastie.balancer/LoadBalancerRecoveryTest.scala @@ -1,12 +1,16 @@ package com.olegych.scastie.balancer -import akka.actor.{ActorSystem, Props} -import akka.pattern.ask -import akka.testkit.{ImplicitSender, TestKit, TestProbe} +import akka.actor.typed.{ActorRef, ActorSystem, Behavior, Scheduler} +import akka.actor.typed.receptionist.Receptionist +import akka.actor.typed.scaladsl.Behaviors +import akka.actor.typed.scaladsl.AskPattern.Askable +import akka.actor.testkit.typed.scaladsl.{ActorTestKit, TestProbe} +import akka.actor.testkit.typed.FishingOutcome +import com.typesafe.sslconfig.util.EnrichedConfig import akka.util.Timeout import com.olegych.scastie.api._ import com.olegych.scastie.sbt._ -import com.olegych.scastie.util.ReconnectInfo +import com.olegych.scastie.util.Services import com.typesafe.config.{Config, ConfigFactory} import org.scalatest.BeforeAndAfterAll import org.scalatest.funsuite.AnyFunSuiteLike @@ -15,14 +19,9 @@ import scala.concurrent._ import scala.concurrent.duration._ class LoadBalancerRecoveryTest() - extends TestKit( - ActorSystem("LoadBalancerRecoveryTest", RemotePortConfig(0)) - ) - with ImplicitSender - with AnyFunSuiteLike + extends AnyFunSuiteLike with BeforeAndAfterAll { - // import system.dispatcher implicit val timeout = Timeout(10.seconds) test("recover from crash") { @@ -49,64 +48,76 @@ class LoadBalancerRecoveryTest() waitFor(sid1, 
ret)(_.isDone) waitFor(sid2, ret)(_.isTimeout) - // waitFor(sid2)(_.isDone) waitFor(sid3, ret)(_.isDone) } - private val serverAkkaPort = 15000 - private val webSystem = ActorSystem("Web", RemotePortConfig(serverAkkaPort)) - - private val sbtAkkaPort = 5150 - private val sbtSystem = - ActorSystem("SbtRunner", RemotePortConfig(sbtAkkaPort)) - - private val progressActor = TestProbe() - private val statusActor = TestProbe() - private val sbtActorReadyProbe = TestProbe() - - private val localhost = "127.0.0.1" - - private val sbtActor = - sbtSystem.actorOf( - Props( - new SbtActor( - system = sbtSystem, - runTimeout = 10.seconds, - sbtReloadTimeout = 20.seconds, - isProduction = false, - readyRef = Some(sbtActorReadyProbe.ref), - reconnectInfo = Some( - ReconnectInfo( - serverHostname = localhost, - serverAkkaPort = serverAkkaPort, - actorHostname = localhost, - actorAkkaPort = sbtAkkaPort - ) - ) + private val config = EnrichedConfig( + ConfigFactory.load().getConfig("com.olegych.scastie") + ) + private val testingConfig = TestingConfig(config.get[String]("system-name")) + import testingConfig._ + + private val sbtSystem = ActorSystem( + SbtActor(config.get[SbtConf]("sbt")), + systemName, + testingConfig(sbtAkkaPort) + ) + + object WebSystem { + sealed trait Message + case class AskProgressActor(replyTo: ActorRef[TestProbe[ProgressActor.Message]]) extends Message + + def apply(): Behavior[Message] = + Behaviors.setup { context => + // progressActor and statusActor need be in same system as DispatchActor + // otherwise, akka will complain about serializing ActerRef + implicit def system: ActorSystem[_] = context.system + val progressActor = TestProbe[ProgressActor.Message]() + val statusActor = TestProbe[StatusActor.Message]() + /* val dispatchActor = */ context.spawn( + DispatchActor( + progressActor.ref, + statusActor.ref, + config.get[BalancerConf]("balancer"), + ), + "DispatchActor" ) - ), - name = "SbtActor" - ) - - sbtActorReadyProbe.fishForMessage(60.seconds) { - case SbtActorReady => { - println("sbt ready") - true - } - case msg => { - println("***") - println(msg) - println("***") - false - } + Behaviors.receiveMessage { + case AskProgressActor(replyTo) => + replyTo ! progressActor + Behaviors.same + } + } } + private val webSystem = ActorSystem( + WebSystem(), + systemName, + testingConfig(serverAkkaPort) + ) + + private val testKit = ActorTestKit(webSystem) + private val runnersReadyProbe = testKit.createTestProbe[Receptionist.Listing]() + testKit.system.receptionist ! 
Receptionist.Subscribe( + Services.Balancer, + runnersReadyProbe.ref + ) + + // wait sbt runners available in dispatchActor before sending RunSnippet to it private val dispatchActor = - webSystem.actorOf( - Props(new DispatchActor(progressActor.ref, statusActor.ref)), - name = "DispatchActor" - ) + runnersReadyProbe.fishForMessage(60.seconds) { + case Services.Balancer.Listing(listings) => + if (listings.isEmpty) { + FishingOutcome.ContinueAndIgnore + } else { + println("runners are ready") + FishingOutcome.Complete + } + }.head match { + case Services.Balancer.Listing(refs) => refs.head + } + implicit val scheduler: Scheduler = webSystem.scheduler private var id = 0 private def run(code: String): SnippetId = { val wrapped = @@ -118,62 +129,66 @@ class LoadBalancerRecoveryTest() val inputs = Inputs.default.copy(code = wrapped, _isWorksheetMode = false) - val task = RunSnippet( - InputsWithIpAndUser(inputs, UserTrace("ip-" + id, None)) + val taskAsk = dispatchActor.ask( + RunSnippet(_, InputsWithIpAndUser(inputs, UserTrace("ip-" + id, None))) ) id += 1 - Await.result( - (dispatchActor.ask(task)).mapTo[SnippetId], - 10.seconds - ) + Await.result(taskAsk, 10.seconds) } + import WebSystem._ + private val progressActor = Await.result(webSystem.ask(AskProgressActor), 10.seconds) + private def waitFor(sid: SnippetId, ret: Map[SnippetId, String])( f: SnippetProgress => Boolean - ): Unit = { - - progressActor.fishForMessage(50.seconds) { - case progress: SnippetProgress => { + ) = + progressActor.fishForMessagePF(50.seconds) { + case progress: SnippetProgress => if (progress.snippetId.get != sid) { - println() - println() - println("*******************************") - println("expected: " + ret(sid)) - println("got: " + ret(progress.snippetId.get)) - println("*******************************") - println() - println() - - assert(false) + FishingOutcome.Fail(s"""\n + |******************************* + |expected: ${ret(sid)} + |got: ${ret(progress.snippetId.get)} + |*******************************\n\n""".stripMargin) + } else if (f(progress)) { + FishingOutcome.Complete + } else { + FishingOutcome.ContinueAndIgnore } - - f(progress) - } } - } override def afterAll(): Unit = { - TestKit.shutdownActorSystem(webSystem) - TestKit.shutdownActorSystem(sbtSystem) - TestKit.shutdownActorSystem(system) + ActorTestKit.shutdown(webSystem) + ActorTestKit.shutdown(sbtSystem) } } -object RemotePortConfig { +private case class TestingConfig(systemName: String) { + val serverAkkaPort = 15000 + val sbtAkkaPort = 5150 + def apply(port: Int): Config = ConfigFactory.parseString( s"""|akka { - | actor { - | provider = "akka.remote.RemoteActorRefProvider" - | } | remote { - | netty.tcp { + | artery.canonical { | hostname = "127.0.0.1" | port = $port | } | } - |}""".stripMargin + | cluster { + | seed-nodes = [ + | "akka://$systemName@127.0.0.1:$serverAkkaPort", + | "akka://$systemName@127.0.0.1:$sbtAkkaPort"] + | jmx.enabled = off + | } + |} + |com.olegych.scastie.sbt { + | run-timeout = 10s + | sbtReloadTimeout = 20s + |} + |""".stripMargin ) } diff --git a/build.sbt b/build.sbt index 4743d2c78..5c8a49511 100644 --- a/build.sbt +++ b/build.sbt @@ -1,9 +1,9 @@ import SbtShared._ import com.typesafe.sbt.SbtNativePackager.Universal -def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % "2.6.15" - -val akkaHttpVersion = "10.2.5" +def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % ( + if(module.startsWith("http")) "10.2.5" else "2.6.15" +) addCommandAlias("startAll", 
"sbtRunner/reStart;server/reStart;client/fastOptJS/startWebpackDevServer") addCommandAlias("startAllProd", "sbtRunner/reStart;server/fullOptJS/reStart") @@ -53,12 +53,12 @@ lazy val utils = project .settings( resolvers += Resolver.typesafeRepo("releases"), libraryDependencies ++= Seq( + akka("serialization-jackson"), akka("protobuf"), - akka("stream"), - akka("actor"), - akka("remote"), + akka("stream-typed"), + akka("cluster-typed"), akka("slf4j"), - akka("testkit") % Test + akka("actor-testkit-typed") % Test ) ) .dependsOn(api.jvm(ScalaVersions.jvm)) @@ -103,10 +103,7 @@ lazy val sbtRunner = project reStart := reStart.dependsOn(runnerRuntimeDependencies: _*).evaluated, resolvers += Resolver.sonatypeRepo("public"), libraryDependencies ++= Seq( - akka("actor"), - akka("testkit") % Test, - akka("remote"), - akka("slf4j"), + akka("actor-testkit-typed") % Test, "org.scalameta" %% "scalafmt-core" % "3.0.0-RC6" ), docker / imageNames := Seq( @@ -153,13 +150,11 @@ lazy val server = project reStart / javaOptions += "-Xmx512m", maintainer := "scalacenter", libraryDependencies ++= Seq( - "com.typesafe.akka" %% "akka-http" % akkaHttpVersion, + akka("http"), "com.softwaremill.akka-http-session" %% "core" % "0.5.10", "ch.megard" %% "akka-http-cors" % "0.4.2", - akka("remote"), - akka("slf4j"), - akka("testkit") % Test, - "com.typesafe.akka" %% "akka-http-testkit" % akkaHttpVersion % Test + akka("actor-testkit-typed") % Test, + akka("http-testkit") % Test ) ) .enablePlugins(JavaServerAppPackaging) @@ -170,7 +165,7 @@ lazy val balancer = project .settings(loggingAndTest) .settings(smallRunnerRuntimeDependenciesInTest) .settings( - libraryDependencies += akka("testkit") % Test + libraryDependencies += akka("actor-testkit-typed") % Test ) .dependsOn(api.jvm(ScalaVersions.jvm), utils, storage, sbtRunner % Test) diff --git a/sbt-runner/src/main/resources/reference.conf b/sbt-runner/src/main/resources/reference.conf index f9a8c5af7..92819b646 100644 --- a/sbt-runner/src/main/resources/reference.conf +++ b/sbt-runner/src/main/resources/reference.conf @@ -1,39 +1,28 @@ -com.olegych.scastie { - sbt { - hostname = "127.0.0.1" - hostname = ${?RUNNER_HOSTNAME} - akka-port = 5150 - akka-port = ${?RUNNER_PORT} - - reconnect = false - reconnect = ${?RUNNER_RECONNECT} - - production = false - production = ${?RUNNER_PRODUCTION} - } +com.olegych.scastie.sbt { + sbtReloadTimeout = 100s + runTimeout = 30s + production = false + production = ${?RUNNER_PRODUCTION} } akka { loggers = ["akka.event.slf4j.Slf4jLogger"] loglevel = "INFO" - actor { - provider = "akka.remote.RemoteActorRefProvider" - warn-about-java-serializer-usage = false - } - remote { - maximum-payload-bytes = 30000000 bytes - transport = "akka.remote.netty.NettyRemoteTransport" - netty.tcp { - hostname = ${com.olegych.scastie.sbt.hostname} - port = ${com.olegych.scastie.sbt.akka-port} - - bind-hostname = ${com.olegych.scastie.sbt.hostname} - bind-port = ${com.olegych.scastie.sbt.akka-port} - - message-frame-size = 30000000b - send-buffer-size = 30000000b - receive-buffer-size = 30000000b + actor.provider = cluster + cluster.downing-provider-class = "akka.cluster.sbr.SplitBrainResolverProvider" + remote.artery { + canonical { + hostname = "127.0.0.1" + port = 5150 + } + advanced { maximum-frame-size = 30000000b } } + cluster.seed-nodes = [ + # this node + "akka://"${com.olegych.scastie.system-name}"@"${akka.remote.artery.canonical.hostname}":"${akka.remote.artery.canonical.port}, + # remote node + "akka://"${com.olegych.scastie.system-name}"@127.0.0.1:15000" + 
] } diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/FormatActor.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/FormatActor.scala index e6187a064..8b130064b 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/FormatActor.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/FormatActor.scala @@ -1,14 +1,15 @@ package com.olegych.scastie package sbt -import akka.actor.Actor +import akka.actor.typed.{Behavior, SupervisorStrategy} +import akka.actor.typed.scaladsl.Behaviors import com.olegych.scastie.api.{FormatRequest, FormatResponse, ScalaTarget} -import org.scalafmt.config.ScalafmtRunner.Dialect +import com.olegych.scastie.util.FormatReq import org.scalafmt.config.{ScalafmtConfig, ScalafmtRunner} import org.scalafmt.{Formatted, Scalafmt} import org.slf4j.LoggerFactory -class FormatActor() extends Actor { +object FormatActor { private val log = LoggerFactory.getLogger(getClass) private def format(code: String, isWorksheetMode: Boolean, scalaTarget: ScalaTarget): Either[String, String] = { @@ -32,8 +33,12 @@ class FormatActor() extends Actor { } } - override def receive: Receive = { - case FormatRequest(code, isWorksheetMode, scalaTarget) => - sender() ! FormatResponse(format(code, isWorksheetMode, scalaTarget)) - } + def apply(): Behavior[FormatReq] = + Behaviors.supervise { + Behaviors.receiveMessage[FormatReq] { + case FormatReq(sender, FormatRequest(code, isWorksheetMode, scalaTarget)) => + sender ! FormatResponse(format(code, isWorksheetMode, scalaTarget)) + Behaviors.same + } + }.onFailure(SupervisorStrategy.resume) } diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala index b2477e929..e8b67496e 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala @@ -1,95 +1,78 @@ package com.olegych.scastie.sbt -import com.olegych.scastie.api._ +import akka.actor.typed.receptionist.Receptionist import com.olegych.scastie.util._ -import akka.actor.{Actor, ActorContext, ActorLogging, ActorRef, ActorSelection, ActorSystem, Props} +import com.olegych.scastie.util.ConfigLoaders._ +import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} +import akka.actor.typed.{Behavior, PostStop, Signal, SupervisorStrategy} +import akka.actor.typed.scaladsl.{AbstractBehavior, ActorContext, Behaviors} +import com.olegych.scastie.util.FormatReq +import com.olegych.scastie.sbt.SbtProcess.SbtTaskEvent -import scala.concurrent.duration._ +import scala.concurrent.duration.FiniteDuration -case object SbtActorReady +object SbtActor { + type Message = SbtMessage -class SbtActor(system: ActorSystem, - runTimeout: FiniteDuration, - sbtReloadTimeout: FiniteDuration, - isProduction: Boolean, - readyRef: Option[ActorRef], - override val reconnectInfo: Option[ReconnectInfo]) - extends Actor - with ActorLogging - with ActorReconnecting { + def apply(config: SbtConf): Behavior[Message] = + Behaviors.supervise { + Behaviors.setup[Message] { ctx => + ctx.system.receptionist ! Receptionist.Register(Services.SbtRunner, ctx.self) - def balancer(context: ActorContext, info: ReconnectInfo): ActorSelection = { - import info._ - context.actorSelection( - s"akka.tcp://Web@$serverHostname:$serverAkkaPort/user/DispatchActor" - ) - } - - override def tryConnect(context: ActorContext): Unit = { - if (isProduction) { - reconnectInfo.foreach { info => - import info._ - balancer(context, info) ! 
SbtRunnerConnect(actorHostname, actorAkkaPort) + new SbtActor(config)(ctx) } - } - } - - override def preStart(): Unit = { - log.info("*** SbtRunner preStart ***") + }.onFailure(SupervisorStrategy.restart) +} - readyRef.foreach(_ ! SbtActorReady) - super.preStart() - } +import SbtActor._ +class SbtActor private ( + config: SbtConf +)(ctx: ActorContext[Message]) extends AbstractBehavior[Message](ctx) { + import context.log - override def postStop(): Unit = { - log.info("*** SbtRunner postStop ***") + log.info("*** SbtRunner preStart ***") - super.postStop() + override def onSignal: PartialFunction[Signal, Behavior[Message]] = { + case PostStop => + log.info("*** SbtRunner postStop ***") + Behaviors.unhandled } - private val formatActor = - context.actorOf(Props(new FormatActor()), name = "FormatActor") + private val formatActor = context.spawn(FormatActor(), name = "FormatActor") private val sbtRunner = - context.actorOf( - Props( - new SbtProcess( - runTimeout, - sbtReloadTimeout, - isProduction, - javaOptions = Seq("-Xms512m", "-Xmx1g") - ) + context.spawn( + SbtProcess( + config.runTimeout, + config.sbtReloadTimeout, + config.production, + javaOptions = Seq("-Xms512m", "-Xmx1g") ), name = "SbtRunner" ) - override def receive: Receive = reconnectBehavior orElse [Any, Unit] { - case SbtPing => { - sender() ! SbtPong - } + override def onMessage(msg: Message): Behavior[Message] = { + msg match { + case format: FormatReq => + formatActor ! format - case format: FormatRequest => { - formatActor.forward(format) - } - - case task: SbtTask => { - sbtRunner.forward(task) - } - - case SbtUp => { - log.info("SbtUp") - reconnectInfo.foreach { info => - log.info("SbtUp sent") - balancer(context, info) ! SbtUp - } - } - - case replay: Replay => { - log.info("Replay") - reconnectInfo.foreach { info => - log.info("Replay sent") - balancer(context, info) ! replay - } + case task: SbtTask => + sbtRunner ! 
SbtTaskEvent(task) } + this } } + +case class SbtConf( + production: Boolean, + runTimeout: FiniteDuration, + sbtReloadTimeout: FiniteDuration, +) + +object SbtConf { + implicit val loader: ConfigLoader[SbtConf] = (c: EnrichedConfig) => SbtConf( + c.get[Boolean]("production"), + c.get[FiniteDuration]("runTimeout"), + c.get[FiniteDuration]("sbtReloadTimeout") + ) +} diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala index 44ae1a84e..4693a5856 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala @@ -1,90 +1,52 @@ package com.olegych.scastie.sbt +import akka.actor.typed.scaladsl.Behaviors import com.olegych.scastie.util.ScastieFileUtil.writeRunningPid -import com.olegych.scastie.util.ReconnectInfo - -import akka.actor.{ActorSystem, Props} +import com.olegych.scastie.util.ShowConfig +import com.typesafe.sslconfig.util.EnrichedConfig +import akka.actor.typed.{ActorSystem, Behavior} import com.typesafe.config.ConfigFactory import scala.concurrent.Await import scala.concurrent.duration._ -import java.util.concurrent.TimeUnit - import org.slf4j.LoggerFactory object SbtMain { def main(args: Array[String]): Unit = { val logger = LoggerFactory.getLogger(getClass) - val system = ActorSystem("SbtRunner") - - val config2 = ConfigFactory.load().getConfig("akka.remote.netty.tcp") - logger.info("akka tcp config") - logger.info(" '" + config2.getString("hostname") + "'") - logger.info(" " + config2.getInt("port")) - - val config = ConfigFactory.load().getConfig("com.olegych.scastie") - - val serverConfig = config.getConfig("web") - val sbtConfig = config.getConfig("sbt") - - val isProduction = sbtConfig.getBoolean("production") - - val isReconnecting = sbtConfig.getBoolean("reconnect") + val config = EnrichedConfig( + ConfigFactory.load().getConfig("com.olegych.scastie") + ) + val sbtConf = config.get[SbtConf]("sbt") - if (isProduction) { + if (sbtConf.production) { val pid = writeRunningPid() logger.info(s"Starting sbtRunner pid: $pid") } - val runTimeout = { - val timeunit = TimeUnit.SECONDS - FiniteDuration( - sbtConfig.getDuration("runTimeout", timeunit), - timeunit - ) - } - - val sbtReloadTimeout = { - val timeunit = TimeUnit.SECONDS - FiniteDuration( - sbtConfig.getDuration("sbtReloadTimeout", timeunit), - timeunit - ) - } - - val reconnectInfo = - ReconnectInfo( - serverHostname = serverConfig.getString("hostname"), - serverAkkaPort = serverConfig.getInt("akka-port"), - actorHostname = sbtConfig.getString("hostname"), - actorAkkaPort = sbtConfig.getInt("akka-port") - ) - - logger.info(" runTimeout: {}", runTimeout) - logger.info(" sbtReloadTimeout: {}", sbtReloadTimeout) - logger.info(" isProduction: {}", isProduction) - logger.info(" runner hostname: {}", reconnectInfo.actorHostname) - logger.info(" runner port: {}", reconnectInfo.actorAkkaPort) - logger.info(" server hostname: {}", reconnectInfo.serverHostname) - logger.info(" server port: {}", reconnectInfo.serverAkkaPort) - - system.actorOf( - Props( - new SbtActor( - system = system, - runTimeout = runTimeout, - sbtReloadTimeout = sbtReloadTimeout, - isProduction = isProduction, - readyRef = None, - reconnectInfo = Some(reconnectInfo) - ) - ), - name = "SbtActor" + val system = ActorSystem[Nothing]( + Guardian(sbtConf), + config.get[String]("system-name") ) - Await.result(system.whenTerminated, Duration.Inf) + logger.info(ShowConfig(system.settings.config, + 
"""|# Scastie sbt runner started with config: + |akka.remote.artery { + | canonical + | bind + |} + |com.olegych.scastie.sbt + |""".stripMargin)) - () + Await.result(system.whenTerminated, Duration.Inf) } } + +private object Guardian { + def apply(conf: SbtConf): Behavior[Nothing] = + Behaviors.setup[Nothing] { ctx => + ctx.spawn(SbtActor(conf), "SbtActor") + Behaviors.empty + } +} diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala index 036f0b813..15d5d31bf 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala @@ -2,24 +2,20 @@ package com.olegych.scastie.sbt import java.nio.file._ import java.time.Instant - -import akka.actor.{ActorRef, Cancellable, FSM, Stash} -import akka.pattern.ask +import akka.actor.typed.scaladsl.AskPattern.Askable +import akka.actor.typed.{ActorRef, Behavior, SupervisorStrategy} +import akka.actor.typed.scaladsl.{ActorContext, Behaviors, StashBuffer, TimerScheduler} import akka.util.Timeout import com.olegych.scastie.api._ import com.olegych.scastie.instrumentation.InstrumentedInputs import com.olegych.scastie.util.ScastieFileUtil.{slurp, write} import com.olegych.scastie.util._ +import org.slf4j.LoggerFactory import scala.concurrent.duration._ import scala.util.Random object SbtProcess { - sealed trait SbtState - case object Initializing extends SbtState - case object Ready extends SbtState - case object Reloading extends SbtState - case object Running extends SbtState sealed trait Data case class SbtData(currentInputs: Inputs) extends Data @@ -27,21 +23,19 @@ object SbtProcess { snippetId: SnippetId, inputs: Inputs, isForcedProgramMode: Boolean, - progressActor: ActorRef, - snippetActor: ActorRef, - timeoutEvent: Option[Cancellable] + progressActor: ActorRef[SnippetProgress], + snippetActor: ActorRef[SnippetProgressAsk], + timeoutKey: Option[Long] ) extends Data - case class SbtStateTimeout(duration: FiniteDuration, state: SbtState) { - def message: String = { - val stateMsg = - state match { - case Reloading => "updating build configuration" - case Running => "running code" - case _ => sys.error(s"unexpected timeout in state $state") - } - s"timed out after $duration when $stateMsg" - } + sealed trait Event + + case class SbtTaskEvent(v: SbtTask) extends Event + + case class ProcessOutputEvent(v: ProcessOutput) extends Event + + case class SbtStateTimeout(duration: FiniteDuration, stateMsg: String) extends Event { + def message: String = s"timed out after $duration when $stateMsg" def toProgress(snippetId: SnippetId): SnippetProgress = { SnippetProgress.default.copy( @@ -59,18 +53,41 @@ object SbtProcess { ) } } + + /** Let it die and restart the actor */ + final class LetItDie(msg: String) extends Exception(msg) + + def apply(runTimeout: FiniteDuration, + reloadTimeout: FiniteDuration, + isProduction: Boolean, + javaOptions: Seq[String], + customSbtDir: Option[Path] = None + ): Behavior[Event] = + Behaviors.withStash(100) { buffer => + Behaviors.supervise[Event] { + Behaviors.setup { ctx => + Behaviors.withTimers { timers => + new SbtProcess(runTimeout, reloadTimeout, isProduction, javaOptions, customSbtDir)(ctx, buffer, timers)() + } + } + }.onFailure(SupervisorStrategy.restart) + } } -class SbtProcess(runTimeout: FiniteDuration, - reloadTimeout: FiniteDuration, - isProduction: Boolean, - javaOptions: Seq[String], - customSbtDir: Option[Path] = None) - extends 
FSM[SbtProcess.SbtState, SbtProcess.Data] - with Stash { +import SbtProcess._ +class SbtProcess private ( + runTimeout: FiniteDuration, + reloadTimeout: FiniteDuration, + isProduction: Boolean, + javaOptions: Seq[String], + customSbtDir: Option[Path] +)(context: ActorContext[Event], buffer: StashBuffer[Event], timers: TimerScheduler[Event]) { import ProcessActor._ - import SbtProcess._ - import context.dispatcher + import context.{executionContext, log} + + // context.log is not thread safe + // https://doc.akka.io/docs/akka/current/typed/logging.html#how-to-log + private val safeLog = LoggerFactory.getLogger(classOf[SbtProcess]) private var progressId = 0L @@ -79,10 +96,11 @@ class SbtProcess(runTimeout: FiniteDuration, val p = _p.copy(id = Some(progressId)) run.progressActor ! p implicit val tm = Timeout(10.seconds) - (run.snippetActor ? p) + implicit val sc = context.system.scheduler + run.snippetActor.ask(SnippetProgressAsk(_, p)) .recover { case e => - log.error(e, s"error while saving progress $p") + safeLog.error(s"error while saving progress $p", e) } } @@ -107,17 +125,15 @@ class SbtProcess(runTimeout: FiniteDuration, slurp(sbtDir.resolve(ScalaTarget.Js.sourceMapFilename)) } - startWith( - Initializing, { - InstrumentedInputs(Inputs.default) match { - case Right(instrumented) => - val inputs = instrumented.inputs - setInputs(inputs) - val p = process - log.info(s"started process ${p}") - SbtData(inputs) - case e => sys.error("failed to instrument default input: " + e) - } + def apply(): Behavior[Event] = initializing( + InstrumentedInputs(Inputs.default) match { + case Right(instrumented) => + val inputs = instrumented.inputs + setInputs(inputs) + val p = process + log.info(s"started process ${p}") + SbtData(inputs) + case e => sys.error("failed to instrument default input: " + e) } ) @@ -129,53 +145,63 @@ class SbtProcess(runTimeout: FiniteDuration, "-Dsbt.banner=false", )).mkString(" ") - val props = - ProcessActor.props( + context.spawn( + ProcessActor( + replyTo = context.messageAdapter(ProcessOutputEvent), command = List("sbt"), workingDir = sbtDir, environment = Map( "SBT_OPTS" -> sbtOpts ) - ) - - context.actorOf(props, name = s"sbt-process-$promptUniqueId") + ), + name = s"sbt-process-$promptUniqueId" + ) } - whenUnhandled { - case Event(_: SbtTask | _: ProcessOutput, _) => - stash() - stay() - case Event(timeout: SbtStateTimeout, run: SbtRun) => - println("*** timeout ***") - - val progress = timeout.toProgress(run.snippetId) - sendProgress(run, progress) - throw new Exception(timeout.message) + def unhandled(data: Data): PartialFunction[Event, Behavior[Event]] = { + case e @ (_: SbtTaskEvent | _: ProcessOutputEvent) => + buffer.stash(e) + Behaviors.same + + case timeout: SbtStateTimeout => + data match { + case run: SbtRun => + println("*** timeout ***") + val progress = timeout.toProgress(run.snippetId) + sendProgress(run, progress) + throw new LetItDie(timeout.message) + case _ => + log.error(s"Unexpected timeout $timeout - when data=$data") + Behaviors.same + } } - onTransition { - case _ -> Ready => - println("-- Ready --") - unstashAll() - case _ -> Initializing => - println("-- Initializing --") - case _ -> Reloading => - println("-- Reloading --") - case _ -> Running => - println("-- Running --") + def initializing(data: SbtData): Behavior[Event] = { + println("-- Initializing --") + Behaviors.receiveMessage { + _initializing(data) orElse unhandled(data) + } } - when(Initializing) { - case Event(out: ProcessOutput, _) => + private def _initializing(data: SbtData): 
PartialFunction[Event, Behavior[Event]] = { + case ProcessOutputEvent(out) => if (isPrompt(out.line)) { - goto(Ready) + ready(data) } else { - stay() + Behaviors.same } } - when(Ready) { - case Event(task @ SbtTask(snippetId, taskInputs, ip, login, progressActor), SbtData(stateInputs)) => + def ready(data: SbtData): Behavior[Event] = { + println("-- Ready --") + buffer.unstashAll(Behaviors.receiveMessage { + _ready(data) orElse unhandled(data) + }) + } + + private def _ready(data: SbtData): PartialFunction[Event, Behavior[Event]] = { + case SbtTaskEvent(SbtTask(snippetId, taskInputs, ip, login, progressActor, snippetActor)) => + val SbtData(stateInputs) = data println(s"Running: (login: $login, ip: $ip) \n ${taskInputs.code.take(30)}") val _sbtRun = SbtRun( @@ -183,8 +209,8 @@ class SbtProcess(runTimeout: FiniteDuration, inputs = taskInputs, isForcedProgramMode = false, progressActor = progressActor, - snippetActor = sender(), - timeoutEvent = None + snippetActor = snippetActor, + timeoutKey = None ) sendProgress(_sbtRun, SnippetProgress.default.copy(isDone = false, ts = Some(Instant.now.toEpochMilli), snippetId = Some(snippetId))) @@ -196,7 +222,7 @@ class SbtProcess(runTimeout: FiniteDuration, if (isReloading) { process ! Input("reload;compile/compileInputs") - gotoWithTimeout(sbtRun, Reloading, reloadTimeout) + gotoWithTimeout(sbtRun, reloading, SbtStateTimeout(reloadTimeout, "updating build configuration")) } else { gotoRunning(sbtRun) } @@ -205,7 +231,7 @@ class SbtProcess(runTimeout: FiniteDuration, val sbtRun = _sbtRun setInputs(sbtRun.inputs) sendProgress(sbtRun, report.toProgress(snippetId)) - goto(Ready) + Behaviors.same } } @@ -216,52 +242,65 @@ class SbtProcess(runTimeout: FiniteDuration, promptUniqueId ) - when(Reloading) { - case Event(output: ProcessOutput, sbtRun: SbtRun) => + def reloading(data: SbtRun): Behavior[Event] = { + println("-- Reloading --") + Behaviors.receiveMessage { + _reloading(data) orElse unhandled(data) + } + } + + private def _reloading(sbtRun: SbtRun): PartialFunction[Event, Behavior[Event]] = { + case ProcessOutputEvent(output) => val progress = extractor.extractProgress(output, sbtRun, isReloading = true) sendProgress(sbtRun, progress) if (progress.isSbtError) { - throw new Exception("sbt error: " + output.line) + throw new LetItDie("sbt error: " + output.line) } if (isPrompt(output.line)) { gotoRunning(sbtRun) } else { - stay() + Behaviors.same } } - when(Running) { - case Event(output: ProcessOutput, sbtRun: SbtRun) => + def running(data: SbtRun): Behavior[Event] = { + println("-- Running --") + Behaviors.receiveMessage { + _running(data) orElse unhandled(data) + } + } + + private def _running(sbtRun: SbtRun): PartialFunction[Event, Behavior[Event]] = { + case ProcessOutputEvent(output) => val progress = extractor.extractProgress(output, sbtRun, isReloading = false) sendProgress(sbtRun, progress) if (progress.isDone) { - sbtRun.timeoutEvent.foreach(_.cancel()) - goto(Ready).using(SbtData(sbtRun.inputs)) + sbtRun.timeoutKey.foreach(timers.cancel) + ready(SbtData(sbtRun.inputs)) } else { - stay() + Behaviors.same } } - private def gotoWithTimeout(sbtRun: SbtRun, nextState: SbtState, duration: FiniteDuration): this.State = { - - sbtRun.timeoutEvent.foreach(_.cancel()) - - val timeout = - context.system.scheduler.scheduleOnce( - duration, - self, - SbtStateTimeout(duration, nextState) - ) - - goto(nextState).using(sbtRun.copy(timeoutEvent = Some(timeout))) + private[this] var timeoutKey = 0L + + private def gotoWithTimeout( + sbtRun: SbtRun, + 
nextState: SbtRun => Behavior[Event], + sbtStateTimeout: SbtStateTimeout + ): Behavior[Event] = { + sbtRun.timeoutKey.foreach(timers.cancel) + timeoutKey += 1 + timers.startSingleTimer(timeoutKey, sbtStateTimeout, sbtStateTimeout.duration) + nextState(sbtRun.copy(timeoutKey = Some(timeoutKey))) } - private def gotoRunning(sbtRun: SbtRun): this.State = { + private def gotoRunning(sbtRun: SbtRun): Behavior[Event] = { process ! Input(sbtRun.inputs.target.sbtRunCommand(sbtRun.inputs.isWorksheetMode)) - gotoWithTimeout(sbtRun, Running, runTimeout) + gotoWithTimeout(sbtRun, running, SbtStateTimeout(runTimeout, "running code")) } private def isPrompt(line: String): Boolean = { diff --git a/sbt-runner/src/test/resources/application.conf b/sbt-runner/src/test/resources/application.conf index f83d1b46d..9ac74c3ed 100644 --- a/sbt-runner/src/test/resources/application.conf +++ b/sbt-runner/src/test/resources/application.conf @@ -1,6 +1 @@ -com.olegych.scastie { - sbt { - akka-port = 15150 - } -} -akka.actor.provider = akka.actor.LocalActorRefProvider \ No newline at end of file +akka.actor.provider = local diff --git a/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala b/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala index a7fc0a313..b382c946e 100644 --- a/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala +++ b/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala @@ -4,6 +4,7 @@ import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.TestActor.AutoPilot import akka.testkit.{ImplicitSender, TestKit, TestProbe} import com.olegych.scastie.api._ +import com.olegych.scastie.sbt.SbtProcess.SbtTaskEvent import com.olegych.scastie.util.SbtTask import org.scalatest.BeforeAndAfterAll import org.scalatest.funsuite.AnyFunSuiteLike @@ -233,17 +234,17 @@ class SbtActorTest() extends TestKit(ActorSystem("SbtActorTest")) with ImplicitS private val timeout = 40.seconds + import akka.actor.typed.scaladsl.adapter._ // SbtProcess uses Stash and it's not compatible with TestActorRef // https://stackoverflow.com/questions/18335127/testing-akka-actors-that-mixin-stash-with-testactorref - private val sbtActor = system.actorOf( - Props( - new SbtProcess( - runTimeout = timeout, - reloadTimeout = 20.seconds, - isProduction = false, - javaOptions = Seq("-Xms51m", "-Xmx550m") - ) - ) + private val sbtActor = system.spawn( + SbtProcess( + runTimeout = timeout, + reloadTimeout = 20.seconds, + isProduction = false, + javaOptions = Seq("-Xms51m", "-Xmx550m") + ), + name = "SbtRunner-test" ) private var currentId = 0 @@ -257,7 +258,7 @@ class SbtActorTest() extends TestKit(ActorSystem("SbtActorTest")) with ImplicitS val ip = "my-ip" val progressActor = TestProbe() - sbtActor ! SbtTask(snippetId, inputs, ip, None, progressActor.ref) + sbtActor ! 
SbtTaskEvent(SbtTask(snippetId, inputs, ip, None, progressActor.ref, self)) val totalTimeout = if (firstRun) timeout + 10.second diff --git a/server/src/main/resources/reference.conf b/server/src/main/resources/reference.conf index 32ab47420..b3101aefd 100644 --- a/server/src/main/resources/reference.conf +++ b/server/src/main/resources/reference.conf @@ -1,17 +1,10 @@ -com.olegych.scastie { - balancer { -// snippets-container = mongo - snippets-container = files - } -} - com.olegych.scastie.web { production = false session-secret = "WWItju7orWthk7vbAPqI72XOBCfZFxbVjMH169o9eLjHmMCGXw2VdBsQeTNF3WH0" oauth2 { - users-file = ./target/users.txt - sessions-file = ./target/sessions.json + users-file = ${com.olegych.scastie.data-dir}/users.txt + sessions-file = ${com.olegych.scastie.data-dir}/sessions.json # dev credentials # https://github.com/organizations/scalacenter/settings/applications/479150 @@ -19,12 +12,17 @@ com.olegych.scastie.web { client-secret = "3c269df0c8114316c42d1b66cf777eb36301b00b" uri = "http://localhost:9000" } + + bind { + hostname: "0.0.0.0" + port: 9000 + } } mongo-async-driver { - akka.remote.netty.tcp { - port = 16000 - bind-port = 16000 + akka.remote.artery { + canonical.port = 16000 + bind.port = 16000 } } @@ -34,26 +32,19 @@ akka { http.server { idle-timeout = 30s } - actor { - provider = "akka.remote.RemoteActorRefProvider" - } - remote { - netty.tcp { + remote.artery { + canonical { hostname = "127.0.0.1" port = 15000 - bind-hostname = "127.0.0.1" - bind-port = 15000 - - - # see https://github.com/scalacenter/scastie/issues/295 - # bind-hostname = ${com.olegych.scastie.web.hostname} - # bind-port = ${com.olegych.scastie.web.akka-port} - - message-frame-size = 4000kB - send-buffer-size = 4000kB - receive-buffer-size = 4000kB + } + advanced { maximum-frame-size = 4000kB } - maximum-payload-bytes = 4000kB } + cluster.seed-nodes = [ + # this node + "akka://"${com.olegych.scastie.system-name}"@"${akka.remote.artery.canonical.hostname}":"${akka.remote.artery.canonical.port}, + # remote node + "akka://"${com.olegych.scastie.system-name}"@127.0.0.1:5150" + ] } diff --git a/server/src/main/scala/com.olegych.scastie.web/RestApiServer.scala b/server/src/main/scala/com.olegych.scastie.web/RestApiServer.scala index 071f5952f..f59b88728 100644 --- a/server/src/main/scala/com.olegych.scastie.web/RestApiServer.scala +++ b/server/src/main/scala/com.olegych.scastie.web/RestApiServer.scala @@ -3,20 +3,20 @@ package web import api._ import balancer._ - -import akka.pattern.ask -import akka.actor.ActorRef +import akka.actor.typed.scaladsl.AskPattern.Askable +import akka.actor.typed.{ActorRef, Scheduler} import akka.util.Timeout import akka.http.scaladsl.model.RemoteAddress +import com.olegych.scastie.util.FormatReq -import scala.concurrent.{Future, ExecutionContext} +import scala.concurrent.Future import scala.concurrent.duration.DurationInt class RestApiServer( - dispatchActor: ActorRef, + dispatchActor: ActorRef[DispatchActor.Message], ip: RemoteAddress, maybeUser: Option[User] -)(implicit executionContext: ExecutionContext) +)(implicit scheduler: Scheduler) extends RestApi { implicit val timeout: Timeout = Timeout(20.seconds) @@ -24,30 +24,23 @@ class RestApiServer( private def wrap(inputs: Inputs): InputsWithIpAndUser = InputsWithIpAndUser(inputs, UserTrace(ip.toString, maybeUser)) - def run(inputs: Inputs): Future[SnippetId] = { + def run(inputs: Inputs): Future[SnippetId] = dispatchActor - .ask(RunSnippet(wrap(inputs))) - .mapTo[SnippetId] - } + .ask(RunSnippet(_, wrap(inputs))) 
- def format(formatRequest: FormatRequest): Future[FormatResponse] = { + def format(formatRequest: FormatRequest): Future[FormatResponse] = dispatchActor - .ask(formatRequest) - .mapTo[FormatResponse] - } + .ask(FormatReq(_, formatRequest)) - def save(inputs: Inputs): Future[SnippetId] = { + def save(inputs: Inputs): Future[SnippetId] = dispatchActor - .ask(SaveSnippet(wrap(inputs))) - .mapTo[SnippetId] - } + .ask(SaveSnippet(_, wrap(inputs))) def update(editInputs: EditInputs): Future[Option[SnippetId]] = { import editInputs._ if (snippetId.isOwnedBy(maybeUser)) { dispatchActor - .ask(UpdateSnippet(snippetId, wrap(inputs))) - .mapTo[Option[SnippetId]] + .ask(UpdateSnippet(_, snippetId, wrap(inputs))) } else { Future.successful(None) } @@ -56,9 +49,7 @@ class RestApiServer( def delete(snippetId: SnippetId): Future[Boolean] = { if (snippetId.isOwnedBy(maybeUser)) { dispatchActor - .ask(DeleteSnippet(snippetId)) - .mapTo[Unit] - .map(_ => true) + .ask(DeleteSnippet(_, snippetId)) } else { Future.successful(false) } @@ -67,21 +58,17 @@ class RestApiServer( def fork(editInputs: EditInputs): Future[Option[SnippetId]] = { import editInputs._ dispatchActor - .ask(ForkSnippet(snippetId, wrap(inputs))) + .ask(ForkSnippet(_, snippetId, wrap(inputs))) .mapTo[Option[SnippetId]] } - def fetch(snippetId: SnippetId): Future[Option[FetchResult]] = { + def fetch(snippetId: SnippetId): Future[Option[FetchResult]] = dispatchActor - .ask(FetchSnippet(snippetId)) - .mapTo[Option[FetchResult]] - } + .ask(FetchSnippet(_, snippetId)) - def fetchOld(id: Int): Future[Option[FetchResult]] = { + def fetchOld(id: Int): Future[Option[FetchResult]] = dispatchActor - .ask(FetchOldSnippet(id)) - .mapTo[Option[FetchResult]] - } + .ask(FetchOldSnippet(_, id)) def fetchUser(): Future[Option[User]] = { Future.successful(maybeUser) @@ -91,8 +78,7 @@ class RestApiServer( maybeUser match { case Some(user) => dispatchActor - .ask(FetchUserSnippets(user)) - .mapTo[List[SnippetSummary]] + .ask(FetchUserSnippets(_, user)) case _ => Future.successful(Nil) } } diff --git a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala index 1cdce15f6..37ee431bb 100644 --- a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala +++ b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala @@ -3,8 +3,9 @@ package com.olegych.scastie.web import com.olegych.scastie.web.routes._ import com.olegych.scastie.web.oauth2._ import com.olegych.scastie.balancer._ -import com.olegych.scastie.util.ScastieFileUtil - +import com.olegych.scastie.util.{ShowConfig, ScastieFileUtil} +import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} +import com.olegych.scastie.util.ConfigLoaders._ import akka.http.scaladsl._ import server.Directives._ @@ -12,87 +13,120 @@ import ch.megard.akka.http.cors.scaladsl.CorsDirectives._ import com.typesafe.config.ConfigFactory import com.typesafe.scalalogging.Logger -import akka.actor.{ActorSystem, Props} -import akka.stream.ActorMaterializer +import akka.actor.typed.scaladsl.Behaviors +import akka.actor.typed.{ActorSystem, Behavior, Scheduler} import scala.concurrent.duration._ -import scala.concurrent.Await +import scala.concurrent.{Await, ExecutionContext} object ServerMain { def main(args: Array[String]): Unit = { - val logger = Logger("ServerMain") - val port = - if (args.isEmpty) 9000 - else args.head.toInt - - val config2 = ConfigFactory.load().getConfig("akka.remote.netty.tcp") - println("akka tcp config") - 
println(config2.getString("hostname")) - println(config2.getInt("port")) + val config = EnrichedConfig( + ConfigFactory.load().getConfig("com.olegych.scastie") + ) + val webConf = config.get[WebConf]("web") + val balancerConf = config.get[BalancerConf]("balancer") - val config = ConfigFactory.load().getConfig("com.olegych.scastie.web") - val production = config.getBoolean("production") + val port = args.headOption.map(_.toInt).getOrElse(webConf.bind.port) - if (production) { - ScastieFileUtil.writeRunningPid() - } + val system = ActorSystem[Nothing]( + Guardian(webConf, balancerConf, port), + config.get[String]("system-name") + ) - implicit val system: ActorSystem = ActorSystem("Web") - import system.dispatcher - implicit val materializer: ActorMaterializer = ActorMaterializer() + logger.info(ShowConfig(system.settings.config, + s"""|# Scastie sever started with config: + |akka.remote.artery { + | canonical + | bind + |} + |com.olegych.scastie.web.bind { + | hostname + | port = $port + |}""".stripMargin)) - val github = new Github - val session = new GithubUserSession(system) - val userDirectives = new UserDirectives(session) + Await.result(system.whenTerminated, Duration.Inf) + } +} - val progressActor = - system.actorOf( - Props[ProgressActor](), - name = "ProgressActor" +private object Guardian { + def apply(webCfg: WebConf, balancerCfg: BalancerConf, port: Int): Behavior[Nothing] = + Behaviors.setup[Nothing] { context => + import context.spawn + implicit def system: ActorSystem[Nothing] = context.system + implicit def ec: ExecutionContext = context.system.executionContext + implicit def sc: Scheduler = context.system.scheduler + + if (webCfg.production) { + ScastieFileUtil.writeRunningPid() + } + + val github = new Github(webCfg.oauth2) + val session = new GithubUserSession( + webCfg, + spawn(ActorRefreshTokenStorageImpl(), "refresh-token-storage") ) + val userDirectives = new UserDirectives(session) - val statusActor = - system.actorOf( - StatusActor.props, - name = "StatusActor" - ) + val progressActor = spawn(ProgressActor(), "ProgressActor") - val dispatchActor = - system.actorOf( - Props(new DispatchActor(progressActor, statusActor)), - name = "DispatchActor" - ) + val statusActor = spawn(StatusActor(), "StatusActor") + + val dispatchActor = spawn(DispatchActor(progressActor, statusActor, balancerCfg), "DispatchActor") - val routes = concat( - cors()( - pathPrefix("api")( + val routes = concat( + cors()( + pathPrefix("api")( + concat( + new ApiRoutes(dispatchActor, userDirectives).routes, + new ProgressRoutes(progressActor).routes, + new DownloadRoutes(dispatchActor).routes, + new StatusRoutes(statusActor, userDirectives).routes, + new ScalaJsRoutes(dispatchActor).routes + ) + ) + ), + new OAuth2Routes(github, session).routes, + cors()( concat( - new ApiRoutes(dispatchActor, userDirectives).routes, - new ProgressRoutes(progressActor).routes, - new DownloadRoutes(dispatchActor).routes, - new StatusRoutes(statusActor, userDirectives).routes, - new ScalaJsRoutes(dispatchActor).routes + new ScalaLangRoutes(dispatchActor, userDirectives).routes, + new FrontPageRoutes(webCfg.production).routes ) ) - ), - new OAuth2Routes(github, session).routes, - cors()( - concat( - new ScalaLangRoutes(dispatchActor, userDirectives).routes, - new FrontPageRoutes(production).routes - ) ) - ) - Await.result(Http().bindAndHandle(routes, "0.0.0.0", port), 1.seconds) - logger.info(s"Scastie started (port: $port)") + Await.result( + Http() + .newServerAt(webCfg.bind.hostname, port) + .bindFlow(routes), 
1.seconds) -// scala.io.StdIn.readLine("press enter to stop server") -// system.terminate() - Await.result(system.whenTerminated, Duration.Inf) + Behaviors.empty + } +} - () - } +case class WebConf( + production: Boolean, + oauth2: Oauth2Conf, + sessionSecret: String, + bind: BindConf, +) +object WebConf { + implicit val loader: ConfigLoader[WebConf] = (c: EnrichedConfig) => WebConf( + c.get[Boolean]("production"), + c.get[Oauth2Conf]("oauth2"), + c.get[String]("session-secret"), + c.get[BindConf]("bind") + ) +} +case class BindConf( + hostname: String, + port: Int, +) +object BindConf { + implicit val loader: ConfigLoader[BindConf] = (c: EnrichedConfig) => BindConf( + c.get[String]("hostname"), + c.get[Int]("port") + ) } diff --git a/server/src/main/scala/com.olegych.scastie.web/oauth2/Github.scala b/server/src/main/scala/com.olegych.scastie.web/oauth2/Github.scala index 46575f3c2..f9d58e3b9 100644 --- a/server/src/main/scala/com.olegych.scastie.web/oauth2/Github.scala +++ b/server/src/main/scala/com.olegych.scastie.web/oauth2/Github.scala @@ -9,28 +9,44 @@ import headers._ import Uri._ import unmarshalling.Unmarshal -import akka.actor.ActorSystem -import akka.stream.ActorMaterializer +import akka.actor.ClassicActorSystemProvider import com.olegych.scastie.api.User -import scala.concurrent.Future -import com.typesafe.config.ConfigFactory -import play.api.libs.json.{OFormat, Reads} +import scala.concurrent.{ExecutionContext, Future} +import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} +import com.olegych.scastie.util.ConfigLoaders._ + +import java.nio.file.{Path => JPath} case class AccessToken(access_token: String) -class Github(implicit system: ActorSystem, materializer: ActorMaterializer) extends PlayJsonSupport { - import system.dispatcher +case class Oauth2Conf( + usersFile: JPath, + sessionsFile: JPath, + clientId: String, + clientSecret: String, + uri: String, +) + +object Oauth2Conf { + implicit val loader: ConfigLoader[Oauth2Conf] = (c: EnrichedConfig) => Oauth2Conf( + c.get[JPath]("users-file"), + c.get[JPath]("sessions-file"), + c.get[String]("client-id"), + c.get[String]("client-secret"), + c.get[String]("uri") + ) +} + +class Github(config: Oauth2Conf)(implicit system: ClassicActorSystemProvider) extends PlayJsonSupport { + implicit def ec: ExecutionContext = system.classicSystem.dispatcher import play.api.libs.json._ implicit val formatUser: OFormat[User] = Json.format[User] implicit val readAccessToken: Reads[AccessToken] = Json.reads[AccessToken] - private val config = - ConfigFactory.load().getConfig("com.olegych.scastie.web.oauth2") - val clientId: String = config.getString("client-id") - private val clientSecret = config.getString("client-secret") - private val redirectUri = config.getString("uri") + "/callback" + val clientId: String = config.clientId + private val redirectUri = config.uri + "/callback" def getUserWithToken(token: String): Future[User] = info(token) def getUserWithOauth2(code: String): Future[User] = { @@ -42,7 +58,7 @@ class Github(implicit system: ActorSystem, materializer: ActorMaterializer) exte uri = Uri("https://github.com/login/oauth/access_token").withQuery( Query( "client_id" -> clientId, - "client_secret" -> clientSecret, + "client_secret" -> config.clientSecret, "code" -> code, "redirect_uri" -> redirectUri ) diff --git a/server/src/main/scala/com.olegych.scastie.web/oauth2/GithubUserSession.scala b/server/src/main/scala/com.olegych.scastie.web/oauth2/GithubUserSession.scala index 1e76612d5..e544ba056 100644 --- 
a/server/src/main/scala/com.olegych.scastie.web/oauth2/GithubUserSession.scala +++ b/server/src/main/scala/com.olegych.scastie.web/oauth2/GithubUserSession.scala @@ -4,30 +4,30 @@ import java.lang.System.{lineSeparator => nl} import java.nio.file._ import java.util.UUID -import akka.actor.ActorSystem +import akka.actor.typed.{ActorRef, Scheduler} import com.olegych.scastie.api.User +import com.olegych.scastie.web.WebConf import com.softwaremill.session._ -import com.typesafe.config.ConfigFactory import com.typesafe.scalalogging.Logger import play.api.libs.json.Json import scala.collection.concurrent.TrieMap +import scala.concurrent.ExecutionContext import scala.jdk.CollectionConverters._ import scala.util.Try import scala.util.control.NonFatal -class GithubUserSession(system: ActorSystem) { +class GithubUserSession( + conf: WebConf, + refreshTokenStorageImpl: ActorRef[ActorRefreshTokenStorage.Message] +)(implicit scheduler: Scheduler, ec: ExecutionContext) { val logger = Logger("GithubUserSession") - private val configuration = - ConfigFactory.load().getConfig("com.olegych.scastie.web") - private val usersFile = - Paths.get(configuration.getString("oauth2.users-file")) - private val usersSessions = - Paths.get(configuration.getString("oauth2.sessions-file")) + private val usersFile = conf.oauth2.usersFile + private val usersSessions = conf.oauth2.sessionsFile private val sessionConfig = - SessionConfig.default(configuration.getString("session-secret")) + SessionConfig.default(conf.sessionSecret) private lazy val users = { val trie = TrieMap[UUID, User]() @@ -41,7 +41,7 @@ class GithubUserSession(system: ActorSystem) { (id: String) => Try { UUID.fromString(id) } ) implicit val sessionManager = new SessionManager[UUID](sessionConfig) - implicit val refreshTokenStorage = new ActorRefreshTokenStorage(system) + implicit val refreshTokenStorage = new ActorRefreshTokenStorage(refreshTokenStorageImpl) private def readSessionsFile(): Vector[(UUID, User)] = { if (Files.exists(usersSessions)) { diff --git a/server/src/main/scala/com.olegych.scastie.web/oauth2/InMemoryRefreshTokenStorage.scala b/server/src/main/scala/com.olegych.scastie.web/oauth2/InMemoryRefreshTokenStorage.scala index 18212e147..1e4da4ed0 100644 --- a/server/src/main/scala/com.olegych.scastie.web/oauth2/InMemoryRefreshTokenStorage.scala +++ b/server/src/main/scala/com.olegych.scastie.web/oauth2/InMemoryRefreshTokenStorage.scala @@ -1,25 +1,28 @@ package com.olegych.scastie.web.oauth2 import com.softwaremill.session.{RefreshTokenData, RefreshTokenStorage, RefreshTokenLookupResult} -import akka.actor.{Actor, ActorSystem, Props} -import akka.pattern.ask +import akka.actor.typed.{ActorRef, Behavior, Scheduler, SupervisorStrategy} +import akka.actor.typed.scaladsl.Behaviors +import akka.actor.typed.scaladsl.AskPattern.Askable import akka.util.Timeout -import scala.concurrent.Future +import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ import scala.collection.mutable import java.util.UUID +import ActorRefreshTokenStorage.Message + private[oauth2] case class SessionStorage(session: UUID, tokenHash: String, expires: Long) -class ActorRefreshTokenStorage(system: ActorSystem) extends RefreshTokenStorage[UUID] { - import system.dispatcher +class ActorRefreshTokenStorage( + impl: ActorRef[Message] +)(implicit scheduler: Scheduler, ec: ExecutionContext) extends RefreshTokenStorage[UUID] { implicit private val timeout = Timeout(10.seconds) - private val impl = system.actorOf(Props(new 
ActorRefreshTokenStorageImpl())) def lookup(selector: String): Future[Option[RefreshTokenLookupResult[UUID]]] = - (impl ? Lookup(selector)).mapTo[Option[RefreshTokenLookupResult[UUID]]] + impl.ask(Lookup(_, selector)) def store(data: RefreshTokenData[UUID]): Future[Unit] = { impl ! Store(data) @@ -31,30 +34,44 @@ class ActorRefreshTokenStorage(system: ActorSystem) extends RefreshTokenStorage[ } def schedule[S](after: Duration)(op: => Future[S]): Unit = { after match { - case finite: FiniteDuration => system.scheduler.scheduleOnce(finite)(op) + case finite: FiniteDuration => scheduler.scheduleOnce(finite, () => op) case _: Duration.Infinite => () } } } -private[oauth2] case class Lookup(selector: String) -private[oauth2] case class Store(data: RefreshTokenData[UUID]) -private[oauth2] case class Remove(selector: String) +object ActorRefreshTokenStorage { + sealed trait Message +} +private case class Lookup( + replyTo: ActorRef[Option[RefreshTokenLookupResult[UUID]]], + selector: String) extends Message +private case class Store(data: RefreshTokenData[UUID]) extends Message +private case class Remove(selector: String) extends Message -class ActorRefreshTokenStorageImpl() extends Actor { +object ActorRefreshTokenStorageImpl { private val storage = mutable.Map[String, SessionStorage]() - override def receive: Receive = { - case Lookup(selector) => + + def apply(): Behavior[Message] = + Behaviors + .supervise[Message](receive) + .onFailure(SupervisorStrategy.resume) + + private def receive: Behavior[Message] = Behaviors.receiveMessage { + case Lookup(replyTo, selector) => val lookupResult = storage .get(selector) .map( s => RefreshTokenLookupResult(s.tokenHash, s.expires, () => s.session) ) - sender() ! lookupResult + replyTo ! lookupResult + Behaviors.same case Store(data) => storage.put(data.selector, SessionStorage(data.forSession, data.tokenHash, data.expires)) + Behaviors.same case Remove(selector) => storage.remove(selector) + Behaviors.same } } diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala index 5c2b8a135..5c83d01ca 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ApiRoutes.scala @@ -1,21 +1,21 @@ package com.olegych.scastie.web.routes -import akka.actor.{ActorRef, ActorSystem} +import akka.actor.typed.{ActorRef, Scheduler} import akka.http.scaladsl.coding.Coders.Gzip import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{Directive1, Route} import com.olegych.scastie.api._ +import com.olegych.scastie.balancer.DispatchActor import com.olegych.scastie.web._ import com.olegych.scastie.web.oauth2._ class ApiRoutes( - dispatchActor: ActorRef, + dispatchActor: ActorRef[DispatchActor.Message], userDirectives: UserDirectives -)(implicit system: ActorSystem) +)(implicit scheduler: Scheduler) extends PlayJsonSupport { import play.api.libs.json._ - import system.dispatcher import userDirectives.optionalLogin implicit val readsInputs: Reads[Inputs] = Json.reads[Inputs] diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/DownloadRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/DownloadRoutes.scala index e36216679..03391cc86 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/DownloadRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/DownloadRoutes.scala @@ -1,26 +1,24 @@ package com.olegych.scastie.web.routes 
-import com.olegych.scastie.balancer.DownloadSnippet - +import com.olegych.scastie.balancer.{DispatchActor, DownloadSnippet} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route - -import akka.actor.ActorRef -import akka.pattern.ask - -import java.nio.file.Path +import akka.actor.typed.{ActorRef, Scheduler} +import akka.actor.typed.scaladsl.AskPattern.Askable import akka.util.Timeout import scala.concurrent.duration.DurationInt -class DownloadRoutes(dispatchActor: ActorRef) { - implicit val timeout = Timeout(5.seconds) +class DownloadRoutes( + dispatchActor: ActorRef[DispatchActor.Message] +)(implicit scheduler: Scheduler) { + private implicit val timeout: Timeout = Timeout(5.seconds) val routes: Route = get { snippetIdStart("download")( sid => - onSuccess((dispatchActor ? DownloadSnippet(sid)).mapTo[Option[Path]]) { + onSuccess(dispatchActor.ask(DownloadSnippet(_, sid))) { case Some(path) => getFromFile(path.toFile) case None => diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/OAuth2Routes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/OAuth2Routes.scala index f89d8993e..4a8a8b12f 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/OAuth2Routes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/OAuth2Routes.scala @@ -29,7 +29,7 @@ class OAuth2Routes(github: Github, session: GithubUserSession)( path("login") { parameter("home".?)( home => - optionalHeaderValueByType[Referer](()) { referrer => + optionalHeaderValueByType(Referer) { referrer => redirect( Uri("https://github.com/login/oauth/authorize").withQuery( Query( @@ -47,7 +47,7 @@ class OAuth2Routes(github: Github, session: GithubUserSession)( ) }, path("logout") { - headerValueByType[Referer](()) { referrer => + headerValueByType(Referer) { referrer => requiredSession(refreshable, usingCookies) { _ => invalidateSession(refreshable, usingCookies) { ctx => ctx.complete( @@ -63,7 +63,7 @@ class OAuth2Routes(github: Github, session: GithubUserSession)( }, pathPrefix("callback") { pathEnd { - parameters(("code", "state".?)) { (code, state) => + parameters("code", "state".?) 
{ (code, state) => onSuccess(github.getUserWithOauth2(code)) { user => setSession(refreshable, usingCookies, session.addUser(user)) { setNewCsrfToken(checkHeader) { ctx => diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala index 742dbf75e..169585a0d 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ProgressRoutes.scala @@ -1,7 +1,7 @@ package com.olegych.scastie.web.routes import akka.NotUsed -import akka.actor.ActorRef +import akka.actor.typed.{ActorRef, Scheduler} import akka.http.scaladsl.coding.Coders.Gzip import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling._ import akka.http.scaladsl.model._ @@ -9,16 +9,17 @@ import akka.http.scaladsl.model.sse.ServerSentEvent import akka.http.scaladsl.model.ws.TextMessage._ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route -import akka.pattern.ask +import akka.actor.typed.scaladsl.AskPattern.Askable import akka.stream.scaladsl._ import com.olegych.scastie.api._ import com.olegych.scastie.balancer._ +import akka.util.Timeout import play.api.libs.json.Json import scala.concurrent.Future import scala.concurrent.duration.DurationInt -class ProgressRoutes(progressActor: ActorRef) { +class ProgressRoutes(progressActor: ActorRef[ProgressMessage])(implicit scheduler: Scheduler) { val routes: Route = encodeResponseWith(Gzip)( concat( snippetIdStart("progress-sse") { sid => @@ -37,8 +38,9 @@ class ProgressRoutes(progressActor: ActorRef) { private def progressSource( snippetId: SnippetId ): Source[SnippetProgress, NotUsed] = { + implicit val timeout: Timeout = 1.second Source - .fromFuture((progressActor ? 
SubscribeProgress(snippetId))(1.second).mapTo[Source[SnippetProgress, NotUsed]]) + .future(progressActor.ask[Source[SnippetProgress, NotUsed]](SubscribeProgress(snippetId, _))) .flatMapConcat(identity) } diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala index b69f5da16..405dc76db 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ScalaJsRoutes.scala @@ -1,20 +1,23 @@ package com.olegych.scastie.web.routes import com.olegych.scastie.api._ - +import com.olegych.scastie.balancer.DispatchActor.Adapter.{ + FetchScalaJs, FetchScalaSource, FetchScalaJsSourceMap +} import akka.util.Timeout - -import akka.pattern.ask -import akka.actor.{ActorRef, ActorSystem} +import akka.actor.typed.scaladsl.AskPattern.Askable +import akka.actor.typed.{ActorRef, Scheduler} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.http.scaladsl.coding.Coders.Gzip +import com.olegych.scastie.balancer.DispatchActor +import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt //not used anymore -class ScalaJsRoutes(dispatchActor: ActorRef)(implicit system: ActorSystem) { - import system.dispatcher +class ScalaJsRoutes(dispatchActor: ActorRef[DispatchActor.Message] +)(implicit ec: ExecutionContext, scheduler: Scheduler) { implicit val timeout: Timeout = Timeout(1.seconds) @@ -24,23 +27,24 @@ class ScalaJsRoutes(dispatchActor: ActorRef)(implicit system: ActorSystem) { snippetIdEnd(Shared.scalaJsHttpPathPrefix, ScalaTarget.Js.targetFilename)( sid => complete( - (dispatchActor ? FetchScalaJs(sid)) - .mapTo[Option[FetchResultScalaJs]] + dispatchActor + .ask(FetchScalaJs(_, sid)) .map(_.map(_.content)) ) ), snippetIdEnd(Shared.scalaJsHttpPathPrefix, ScalaTarget.Js.sourceFilename)( sid => complete( - (dispatchActor ? FetchScalaSource(sid)) - .mapTo[Option[FetchResultScalaSource]] + dispatchActor + .ask(FetchScalaSource(_, sid)) .map(_.map(_.content)) ) ), snippetIdEnd(Shared.scalaJsHttpPathPrefix, ScalaTarget.Js.sourceMapFilename)( sid => complete( - (dispatchActor ? 
FetchScalaJsSourceMap(sid)) + dispatchActor + .ask(FetchScalaJsSourceMap(_, sid)) .mapTo[Option[FetchResultScalaJsSourceMap]] .map(_.map(_.content)) ) diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/ScalaLangRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/ScalaLangRoutes.scala index d5404b392..7f3bca96d 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/ScalaLangRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/ScalaLangRoutes.scala @@ -6,22 +6,21 @@ import com.olegych.scastie.web.oauth2._ import com.olegych.scastie.balancer._ import akka.util.Timeout -import akka.actor.{ActorRef, ActorSystem} - +import akka.actor.typed.{ActorRef, Scheduler} import akka.http.scaladsl.model.StatusCodes.Created import akka.http.scaladsl.server.Route import akka.http.scaladsl.server.Directives._ -import akka.pattern.ask +import akka.actor.typed.scaladsl.AskPattern.Askable +import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt // temporary route for the scala-lang frontpage class ScalaLangRoutes( - dispatchActor: ActorRef, + dispatchActor: ActorRef[DispatchActor.Message], userDirectives: UserDirectives -)(implicit system: ActorSystem) { - import system.dispatcher +)(implicit ec: ExecutionContext, scheduler: Scheduler) { import userDirectives.optionalLogin implicit val timeout: Timeout = Timeout(5.seconds) @@ -43,7 +42,7 @@ class ScalaLangRoutes( ) complete( - (dispatchActor ? RunSnippet(inputs)) + dispatchActor.ask(RunSnippet(_, inputs)) .mapTo[SnippetId] .map( snippetId => diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/StatusRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/StatusRoutes.scala index cab66841f..e29dfd0e6 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/StatusRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/StatusRoutes.scala @@ -1,15 +1,16 @@ package com.olegych.scastie.web.routes import akka.NotUsed -import akka.actor.ActorRef +import akka.actor.typed.{ActorRef, Scheduler} import akka.http.scaladsl.marshalling.sse.EventStreamMarshalling._ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.sse.ServerSentEvent import akka.http.scaladsl.model.ws.TextMessage._ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.{Route, _} -import akka.pattern.ask +import akka.actor.typed.scaladsl.AskPattern.Askable import akka.stream.scaladsl._ +import akka.util.Timeout import com.olegych.scastie.api._ import com.olegych.scastie.balancer._ import com.olegych.scastie.web.oauth2.UserDirectives @@ -18,7 +19,9 @@ import play.api.libs.json.Json import scala.concurrent.duration.DurationInt import scala.concurrent.{ExecutionContext, Future} -class StatusRoutes(statusActor: ActorRef, userDirectives: UserDirectives)(implicit ec: ExecutionContext) { +class StatusRoutes( + statusActor: ActorRef[StatusActor.Message], userDirectives: UserDirectives +)(implicit ec: ExecutionContext, scheduler: Scheduler) { val isAdminUser: Directive1[Boolean] = userDirectives.optionalLogin.map( @@ -58,8 +61,10 @@ class StatusRoutes(statusActor: ActorRef, userDirectives: UserDirectives)(implic case _ => progress } + + implicit val timeout: Timeout = 2.second Source - .fromFuture((statusActor ? 
SubscribeStatus)(2.seconds).mapTo[Source[StatusProgress, NotUsed]]) + .future(statusActor.ask(SubscribeStatus.apply)) .flatMapConcat(s => s.map(hideTask)) } diff --git a/utils/src/main/resources/reference.conf b/utils/src/main/resources/reference.conf index ade1a37d5..c3850c22f 100644 --- a/utils/src/main/resources/reference.conf +++ b/utils/src/main/resources/reference.conf @@ -1,19 +1,9 @@ -com.olegych.scastie { - web { - hostname = "127.0.0.1" - hostname = ${?SERVER_HOSTNAME} - akka-port = 15000 - akka-port = ${?SERVER_AKKA_PORT} - } - - sbt { - sbtReloadTimeout = 100s - runTimeout = 30s - } -} - +# The name of the cluster’s ActorSystem must be the same for all members, +# which is passed in when you start the ActorSystem. +com.olegych.scastie.system-name = "sys" akka { + # config for akka.contrib.process.BlockingProcess process { blocking-process { # The configuration key to use in order to override the dispatcher used for blocking IO. @@ -38,4 +28,13 @@ akka { inspection-interval = 1 second } } + + actor.serialization-bindings { + "com.olegych.scastie.util.SbtMessage" = jackson-json + "com.olegych.scastie.util.BalancerMessage" = jackson-json + "com.olegych.scastie.api.ProgressMessage" = jackson-json + } + serialization.jackson { + jackson-modules += "com.olegych.scastie.util.PlayJackson" + } } diff --git a/utils/src/main/scala/com.olegych.scastie/util/BlockingProcess.scala b/utils/src/main/scala/com.olegych.scastie/util/BlockingProcess.scala index 2dee1aebc..82f0118a3 100644 --- a/utils/src/main/scala/com.olegych.scastie/util/BlockingProcess.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/BlockingProcess.scala @@ -3,17 +3,17 @@ */ package akka.contrib.process -import akka.actor.{Actor, ActorLogging, ActorRef, NoSerializationVerificationNeeded, Props, SupervisorStrategy, Terminated} +import akka.actor.typed.{ActorRef, Behavior, DispatcherSelector, PostStop, PreRestart, Signal, Terminated} +import akka.actor.typed.scaladsl.{AbstractBehavior, ActorContext, Behaviors} +import akka.actor.NoSerializationVerificationNeeded import akka.stream.{ActorAttributes, IOResult} import akka.stream.scaladsl.{Sink, Source, StreamConverters} import akka.util.{ByteString, Helpers} import java.io.File import java.lang.{Process => JavaProcess, ProcessBuilder => JavaProcessBuilder} -import java.util.concurrent.TimeUnit - import scala.collection.immutable import scala.concurrent.{Future, blocking} -import scala.concurrent.duration.Duration +import scala.jdk.DurationConverters._ object BlockingProcess { @@ -33,6 +33,9 @@ object BlockingProcess { final val BlockingIODispatcherId = "akka.process.blocking-process.blocking-io-dispatcher-id" + /** Base trait for all messages send to the receiver */ + sealed trait Response + /** * Sent to the receiver on startup - specifies the streams used for managing input, output and error respectively. * This message should only be received by the parent of the BlockingProcess and should not be passed across the @@ -46,56 +49,61 @@ object BlockingProcess { stdin: Sink[ByteString, Future[IOResult]], stdout: Source[ByteString, Future[IOResult]], stderr: Source[ByteString, Future[IOResult]]) - extends NoSerializationVerificationNeeded + extends NoSerializationVerificationNeeded with Response /** * Sent to the receiver after the process has exited. 
* * @param exitValue the exit value of the process */ - case class Exited(exitValue: Int) + case class Exited(exitValue: Int) extends Response + + /** Base trait for all messages received by BlockingProcess actor */ + sealed trait Message /** * Send a request to destroy the process. * On POSIX, this sends a SIGTERM, but implementation is platform specific. */ - case object Destroy + case object Destroy extends Message /** * Send a request to forcibly destroy the process. * On POSIX, this sends a SIGKILL, but implementation is platform specific. */ - case object DestroyForcibly + case object DestroyForcibly extends Message /** * Sent if stdin from the process is terminated */ - case object StdinTerminated + case object StdinTerminated extends Message /** * Sent if stdout from the process is terminated */ - case object StdoutTerminated + case object StdoutTerminated extends Message /** * Sent if stderr from the process is terminated */ - case object StderrTerminated + case object StderrTerminated extends Message /** - * Create Props for a [[BlockingProcess]] actor. + * Create a [[BlockingProcess]] actor Behavior. * * @param command signifies the program to be executed and its optional arguments * @param workingDir the working directory for the process; default is the current working directory * @param environment the environment for the process; default is `Map.emtpy` - * @param stdioTimeout the amount of time to tolerate waiting for a process to communicate back to this actor - * @return Props for a [[BlockingProcess]] actor + * @param messageAdapter the receiver. + * @return The [[BlockingProcess]] actor Behavior */ - def props(command: immutable.Seq[String], + def apply(command: immutable.Seq[String], workingDir: File = new File(System.getProperty("user.dir")), environment: Map[String, String] = Map.empty, - stdioTimeout: Duration = Duration.Undefined) = - Props(new BlockingProcess(command, workingDir, environment, stdioTimeout)) + messageAdapter: ActorRef[Response]): Behavior[Message] = + Behaviors.setup { ctx => + new BlockingProcess(command, workingDir, environment, messageAdapter)(ctx) + } private def prepareCommand(command: Seq[String]) = if (Helpers.isWindows) List("cmd", "/c") ++ (command map winQuote) @@ -125,6 +133,8 @@ object BlockingProcess { } } +import BlockingProcess._ + /** * This actor uses the JDK process API. As such, more memory given that more threads are consumed. Favor the * [[NonBlockingProcess]] actor unless you *need* to use the JDK. @@ -138,17 +148,20 @@ object BlockingProcess { * A dispatcher as indicated by the "akka.process.blocking-process.blocking-io-dispatcher-id" setting is used * internally by the actor as various JDK calls are made which can block. 
*/ -class BlockingProcess(command: immutable.Seq[String], directory: File, environment: Map[String, String], stdioTimeout: Duration) - extends Actor - with ActorLogging { +class BlockingProcess private ( + command: immutable.Seq[String], + directory: File, + environment: Map[String, String], + messageAdapter: ActorRef[Response] +)(ctx: ActorContext[Message]) extends AbstractBehavior[Message](ctx) { - import BlockingProcess._ - import context.dispatcher + import ctx.{executionContext, self, log} - override val supervisorStrategy: SupervisorStrategy = - SupervisorStrategy.stoppingStrategy + // stopped if an exception is thrown and no supervision strategy is defined + // https://doc.akka.io/docs/akka/current/typed/fault-tolerance.html + // override val supervisorStrategy = SupervisorStrategy.stoppingStrategy - override def preStart(): Unit = { + private def preStart(): Unit = { println("preStart") val process: JavaProcess = { import scala.jdk.CollectionConverters._ @@ -181,49 +194,55 @@ class BlockingProcess(command: immutable.Seq[String], directory: File, environme .withAttributes(selfDispatcherAttribute) .mapMaterializedValue(_.andThen { case _ => self ! StderrTerminated }) - context.parent ! Started(getPid(process), stdin, stdout, stderr) + messageAdapter ! Started(getPid(process), stdin, stdout, stderr) log.debug( s"Blocking process started with dispatcher: $blockingIODispatcherId" ) - } finally { context.watch( - context.actorOf( - ProcessDestroyer - .props(process, context.parent) - .withDispatcher(blockingIODispatcherId), - "process-destroyer" + context.spawn( + ProcessDestroyer(process, messageAdapter), + "process-destroyer", + DispatcherSelector.fromConfig(blockingIODispatcherId) ) ) } } - override def receive: Receive = { - case Destroy => - log.debug("Received request to destroy the process.") - tellDestroyer(ProcessDestroyer.Destroy) - case DestroyForcibly => - log.debug("Received request to forcibly destroy the process.") - tellDestroyer(ProcessDestroyer.DestroyForcibly) - case Terminated(_) => - context.stop(self) - case StdinTerminated => - log.debug("Stdin was terminated") - tellDestroyer(ProcessDestroyer.Inspect) - case StdoutTerminated => - log.debug("Stdout was terminated") - tellDestroyer(ProcessDestroyer.Inspect) - case StderrTerminated => - log.debug("Stderr was terminated") - tellDestroyer(ProcessDestroyer.Inspect) + preStart() + + override val onSignal: PartialFunction[Signal, Behavior[Message]] = { + case Terminated(_) => Behaviors.stopped + } + + override def onMessage(msg: Message): Behavior[Message] = { + msg match { + case Destroy => + log.debug("Received request to destroy the process.") + tellDestroyer(ProcessDestroyer.Destroy) + case DestroyForcibly => + log.debug("Received request to forcibly destroy the process.") + tellDestroyer(ProcessDestroyer.DestroyForcibly) + case StdinTerminated => + log.debug("Stdin was terminated") + tellDestroyer(ProcessDestroyer.Inspect) + case StdoutTerminated => + log.debug("Stdout was terminated") + tellDestroyer(ProcessDestroyer.Inspect) + case StderrTerminated => + log.debug("Stderr was terminated") + tellDestroyer(ProcessDestroyer.Inspect) + } + this } - private def tellDestroyer(msg: Any) = - context.child("process-destroyer").foreach(_ ! msg) + private def tellDestroyer(msg: ProcessDestroyer.Message) = + context.child("process-destroyer").foreach(_.unsafeUpcast[ProcessDestroyer.Message] ! msg) } private object ProcessDestroyer { + sealed trait Message /** * The configuration key to use for the inspection interval. 
@@ -237,66 +256,64 @@ private object ProcessDestroyer { * instance if a process forks and a child continues to run when it dies, * it will have a reference to those handles. */ - case object Inspect + case object Inspect extends Message /** * Request that process.destroy() be called */ - case object Destroy + case object Destroy extends Message /** * Request that process.destroyForcibly() be called */ - case object DestroyForcibly - - def props(process: JavaProcess, exitValueReceiver: ActorRef): Props = - Props(new ProcessDestroyer(process, exitValueReceiver)) -} - -private class ProcessDestroyer(process: JavaProcess, exitValueReceiver: ActorRef) extends Actor with ActorLogging { - import ProcessDestroyer._ - import context.dispatcher - - private val inspectionInterval = - Duration( - context.system.settings.config.getDuration(InspectionInterval).toMillis, - TimeUnit.MILLISECONDS - ) - - private val inspectionTick = - context.system.scheduler.schedule(inspectionInterval, inspectionInterval, self, Inspect) - - def pkill(): Unit = { - if (Helpers.isWindows) { - process.destroy() - } else { - val pid = BlockingProcess.getPid(process).get - import sys.process._ - s"pkill -KILL -P $pid".! == 0 - } - } - - override def receive = { - case Destroy => - blocking(process.destroy()) - case DestroyForcibly => - blocking(process.destroyForcibly()) - case Inspect => - if (!process.isAlive) { - log.debug("Process has terminated, stopping self") - context.stop(self) + case object DestroyForcibly extends Message + + def apply(process: JavaProcess, messageAdapter: ActorRef[Response]): Behavior[Message] = { + Behaviors.withTimers { timers => + Behaviors.setup { context => + import context.log + + val inspectionInterval = context.system.settings.config.getDuration(InspectionInterval).toScala + + timers.startTimerWithFixedDelay(Inspect, inspectionInterval, inspectionInterval) + + def pkill(): Unit = { + if (Helpers.isWindows) { + process.destroy() + } else { + val pid = BlockingProcess.getPid(process).get + import sys.process._ + s"pkill -KILL -P $pid".! == 0 + } + } + + Behaviors.receiveMessage[Message] { + case Destroy => + blocking(process.destroy()) + Behaviors.same + case DestroyForcibly => + blocking(process.destroyForcibly()) + Behaviors.same + case Inspect => + if (!process.isAlive) { + log.debug("Process has terminated, stopping self") + Behaviors.stopped + } else { + Behaviors.same + } + }.receiveSignal { + case (_, PostStop) => + pkill() + + val exitValue = blocking { + process.destroy() + process.destroyForcibly() + process.waitFor() + } + messageAdapter ! BlockingProcess.Exited(exitValue) + Behaviors.same + } } - } - - override def postStop(): Unit = { - inspectionTick.cancel() - pkill() - - val exitValue = blocking { - process.destroy() - process.destroyForcibly() - process.waitFor() } - exitValueReceiver ! 
BlockingProcess.Exited(exitValue) } } diff --git a/utils/src/main/scala/com.olegych.scastie/util/ConfigLoaders.scala b/utils/src/main/scala/com.olegych.scastie/util/ConfigLoaders.scala new file mode 100644 index 000000000..ff2caa98e --- /dev/null +++ b/utils/src/main/scala/com.olegych.scastie/util/ConfigLoaders.scala @@ -0,0 +1,13 @@ +package com.olegych.scastie.util + +import com.typesafe.sslconfig.util.ConfigLoader._ +import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} + +import java.nio.file.{Path, Paths} +import scala.language.implicitConversions + +object ConfigLoaders { + implicit def toConfigLoader[A](f: EnrichedConfig => A): ConfigLoader[A] = playConfigLoader.map(f) + + implicit val pathLoader: ConfigLoader[Path] = stringLoader.map(Paths.get(_)) +} diff --git a/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala b/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala new file mode 100644 index 000000000..09a84feb3 --- /dev/null +++ b/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala @@ -0,0 +1,62 @@ +package com.olegych.scastie.util + +import com.fasterxml.jackson.core.{JsonGenerator, JsonLocation, JsonParseException, JsonParser} +import com.fasterxml.jackson.databind.{DeserializationContext, SerializerProvider} +import com.fasterxml.jackson.databind.deser.std.StdDeserializer +import com.fasterxml.jackson.databind.module.SimpleModule +import com.fasterxml.jackson.databind.ser.std.StdSerializer +import com.olegych.scastie.api.{FetchResult, FormatRequest, FormatResponse, Inputs, SnippetId, SnippetProgress, SnippetSummary} +import play.api.libs.json.{Format, Json, Reads, Writes} + +import java.io.ByteArrayInputStream +import scala.reflect.ClassTag + +class PlayJsonSerializer[T: Writes](cls: Class[T]) extends StdSerializer[T](cls) { + override def serialize(value: T, gen: JsonGenerator, provider: SerializerProvider): Unit = + gen.writeRawValue(Json.stringify(Json.toJson(value))) +} + +class PlayJsonDeserializer[T: Reads](cls: Class[T]) extends StdDeserializer[T](cls) { + override def deserialize(p: JsonParser, ctxt: DeserializationContext): T = { + def error() = throw new JsonParseException(p, "not support") + + p.getTokenLocation match { // current token is '{' + case JsonLocation.NA => error() + + case loc => loc.getSourceRef match { + case s: String => + val begin = loc.getCharOffset.toInt + p.skipChildren() // current token is '}' + val end = p.getCurrentLocation.getCharOffset.toInt + Json.parse(s.substring(begin, end)).as[T] + + case bytes: Array[Byte] => + val begin = loc.getByteOffset.toInt + p.skipChildren() // current token is '}' + val end = p.getCurrentLocation.getByteOffset.toInt + val in = new ByteArrayInputStream(bytes, begin, end - begin) + Json.parse(in).as[T] + + // don't need support for other cases + // Find usage of [[com.fasterxml.jackson.core.JsonFactory._createContext]] for all cases + case _ => error() + } + } + } +} + +class PlayJackson extends SimpleModule { + private def add[T: ClassTag: Format] = { + val cls = implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]] + addSerializer(cls, new PlayJsonSerializer[T](cls)) + addDeserializer(cls, new PlayJsonDeserializer[T](cls)) + } + + add[SnippetId] + add[FetchResult] + add[SnippetSummary] + add[FormatRequest] + add[FormatResponse] + add[SnippetProgress] + add[Inputs] +} diff --git a/utils/src/main/scala/com.olegych.scastie/util/ProcessActor.scala b/utils/src/main/scala/com.olegych.scastie/util/ProcessActor.scala index 9b1599837..8d7915e47 100644 --- 
a/utils/src/main/scala/com.olegych.scastie/util/ProcessActor.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/ProcessActor.scala @@ -1,63 +1,70 @@ package com.olegych.scastie.util +import akka.actor.typed.{ActorRef, Behavior} +import akka.actor.typed.scaladsl.{ActorContext, Behaviors, StashBuffer} +import akka.contrib.process.BlockingProcess.{Exited, Started} + import java.nio.file._ import java.time.Instant import java.util.concurrent.atomic.AtomicLong - -import akka.actor.{Actor, ActorRef, Props, Stash} import akka.contrib.process._ import akka.stream.scaladsl.{Flow, Framing, Sink, Source} -import akka.stream.{ActorMaterializer, ActorMaterializerSettings, OverflowStrategy, ThrottleMode} +import akka.stream.typed.scaladsl.ActorSource +import akka.stream.{Materializer, OverflowStrategy, ThrottleMode} import akka.util.ByteString import com.olegych.scastie.api.{ProcessOutput, ProcessOutputType} -import org.slf4j.LoggerFactory import scala.concurrent.duration._ object ProcessActor { - case class Input(line: String) - - case object Shutdown - - def props(command: List[String], - workingDir: Path = Paths.get(System.getProperty("user.dir")), - environment: Map[String, String] = Map.empty, - killOnExit: Boolean = false): Props = { - Props(new ProcessActor(command, workingDir, environment, killOnExit)) - } + sealed trait Message + + case class ProcessResponse(response: BlockingProcess.Response) extends Message + + case class Input(line: String) extends Message + + def apply( + replyTo: ActorRef[ProcessOutput], + command: List[String], + workingDir: Path = Paths.get(System.getProperty("user.dir")), + environment: Map[String, String] = Map.empty, + killOnExit: Boolean = false + ): Behavior[Message] = + Behaviors.setup { context => + Behaviors.withStash(100) { buffer => + new ProcessActor( + replyTo, command, workingDir, environment, killOnExit + )(context, buffer).receive + } + } } +import ProcessActor._ + /* > Output line type < Input line ! 
Exit */ -class ProcessActor(command: List[String], workingDir: Path, environment: Map[String, String], killOnExit: Boolean) - extends Actor - with Stash { - - import ProcessActor._ - - private val log = LoggerFactory.getLogger(getClass) - - // private val props = - // NonBlockingProcessPkill.props( - // command = command, - // workingDir = workingDir.toFile, - // environment = environment - // ) - // import NonBlockingProcess._ - - private val props = - BlockingProcess.props( - command = command, +class ProcessActor private( + replyTo: ActorRef[ProcessOutput], + command: List[String], + workingDir: Path, + environment: Map[String, String], + killOnExit: Boolean +)(context: ActorContext[Message], buffer: StashBuffer[Message]) { + import context.log + + context.spawn( + BlockingProcess( + command, workingDir = workingDir.toFile, - environment = environment - ) - import BlockingProcess._ - - private val process = context.actorOf(props, name = "process") + environment, + context.messageAdapter(ProcessResponse) + ), + name = "process" + ) private def lines(std: Source[ByteString, _]): Source[String, _] = { std @@ -71,13 +78,13 @@ class ProcessActor(command: List[String], workingDir: Path, environment: Map[Str .map(_.utf8String) } - private implicit val materializer = ActorMaterializer( - ActorMaterializerSettings(context.system) - ) + // https://doc.akka.io/docs/akka/current/stream/stream-flows-and-basics.html#actor-materializer-lifecycle + implicit val mat: Materializer = Materializer(context) private val outputId = new AtomicLong(0) - override def receive: Receive = { - case Started(pid, stdin, stdout, stderr) => + + def receive: Behavior[Message] = Behaviors.receiveMessage { + case ProcessResponse(Started(pid, stdin, stdout, stderr)) => println("process started: " + pid) lines(stdout) .map { line => @@ -94,36 +101,51 @@ class ProcessActor(command: List[String], workingDir: Path, environment: Map[Str case (ts, output) => val now = Instant.now println(s"> ${output.id.getOrElse(0)} ${now.toEpochMilli - ts.toEpochMilli}ms: ${output.line}") - context.parent ! output + replyTo ! output now }) - val stdin2: Source[ByteString, ActorRef] = - Source - .actorRef[Input](Int.MaxValue, OverflowStrategy.fail) + val stdin2: Source[ByteString, ActorRef[Input]] = + ActorSource + .actorRef[Input]( + // only completed/ failed when this ProcessActor stop + completionMatcher = PartialFunction.empty, + failureMatcher = PartialFunction.empty, + Int.MaxValue, + OverflowStrategy.fail + ) .map { case Input(line) => ByteString(line + "\n") } - val ref: ActorRef = + val ref: ActorRef[Input] = Flow[ByteString] .to(stdin) .runWith(stdin2) - context.become(active(ref)) - - unstashAll() + buffer.unstashAll(active(ref)) case input: Input => - stash() + buffer.stash(input) + Behaviors.same + + case x @ ProcessResponse(Exited(_)) => + log.error("Unexpected message {}", x) + Behaviors.same } - private def active(stdin: ActorRef): Receive = { + private def active(stdin: ActorRef[Input]): Behavior[Message] = Behaviors.receiveMessage { case input: Input => println(s"< ${outputId.incrementAndGet()}: $input") stdin ! 
input + Behaviors.same - case Exited(exitValue) => + case ProcessResponse(Exited(exitValue)) => if (killOnExit) { throw new Exception("process exited: " + exitValue) } + Behaviors.same + + case x @ ProcessResponse(_: Started) => + log.error("Unexpected message {}", x) + Behaviors.same } } diff --git a/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala b/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala deleted file mode 100644 index 0b3bce038..000000000 --- a/utils/src/main/scala/com.olegych.scastie/util/ReconnectingActor.scala +++ /dev/null @@ -1,65 +0,0 @@ -package com.olegych.scastie.util - -import akka.actor.{Actor, ActorContext, ActorLogging, Cancellable} -import akka.remote.DisassociatedEvent -import com.olegych.scastie.api.ActorConnected - -import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.duration._ - -case class ReconnectInfo(serverHostname: String, serverAkkaPort: Int, actorHostname: String, actorAkkaPort: Int) - -trait ActorReconnecting extends Actor with ActorLogging { - - private var tryReconnectCallback: Option[Cancellable] = None - - def reconnectInfo: Option[ReconnectInfo] - - def tryConnect(context: ActorContext): Unit - - def onConnected(): Unit = {} - - def onDisconnected(): Unit = {} - - private def setupReconnectCallback(context: ActorContext): Unit = { - if (reconnectInfo.isDefined) { - tryReconnectCallback.foreach(_.cancel()) - tryReconnectCallback = Some( - context.system.scheduler.schedule(0.seconds, 10.seconds) { - log.info("Reconnecting to server") - tryConnect(context) - } - ) - } - } - - override def preStart(): Unit = - try { - context.system.eventStream.subscribe(self, classOf[DisassociatedEvent]) - setupReconnectCallback(context) - } finally super.preStart() - - val reconnectBehavior: Receive = { - case ActorConnected => - log.info("Connected to server") - tryReconnectCallback.foreach(_.cancel()) - tryReconnectCallback = None - onConnected() - - case ev: DisassociatedEvent => { - println("DisassociatedEvent " + ev) - - val isServerHostname = - reconnectInfo.exists(info => ev.remoteAddress.host.contains(info.serverHostname)) - - val isServerAkkaPort = - reconnectInfo.exists(info => ev.remoteAddress.port.contains(info.serverAkkaPort)) - - if (isServerHostname && isServerAkkaPort && ev.inbound) { - log.warning("Disconnected from server") - onDisconnected() - setupReconnectCallback(context) - } - } - } -} diff --git a/utils/src/main/scala/com.olegych.scastie/util/SbtTask.scala b/utils/src/main/scala/com.olegych.scastie/util/SbtTask.scala index b0dfb5a05..cef12366c 100644 --- a/utils/src/main/scala/com.olegych.scastie/util/SbtTask.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/SbtTask.scala @@ -1,13 +1,28 @@ package com.olegych.scastie.util import com.olegych.scastie.api._ +import akka.NotUsed +import akka.actor.typed.ActorRef +import akka.actor.typed.receptionist.ServiceKey -import akka.actor.ActorRef +sealed trait SbtMessage +// note: DispatchActor.Message alias to this +trait BalancerMessage -case class SbtTask(snippetId: SnippetId, inputs: Inputs, ip: String, login: Option[String], progressActor: ActorRef) +case class FormatReq(replyTo: ActorRef[FormatResponse], r: FormatRequest) extends SbtMessage with BalancerMessage -case class SbtRun(snippetId: SnippetId, inputs: Inputs, progressActor: ActorRef, snippetActor: ActorRef) +case class SnippetProgressAsk(replyTo: ActorRef[NotUsed], v: SnippetProgress) extends BalancerMessage -case class Replay(run: SbtRun) +case class SbtTask( + 
snippetId: SnippetId, + inputs: Inputs, + ip: String, + login: Option[String], + progressActor: ActorRef[SnippetProgress], + snippetActor: ActorRef[SnippetProgressAsk], +) extends SbtMessage -case object SbtUp +object Services { + val SbtRunner: ServiceKey[SbtMessage] = ServiceKey("SbtRunner") + val Balancer: ServiceKey[BalancerMessage] = ServiceKey("Balancer") +} diff --git a/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala b/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala new file mode 100644 index 000000000..da3b9914b --- /dev/null +++ b/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala @@ -0,0 +1,43 @@ +package com.olegych.scastie.util + +import com.typesafe.config.{Config, ConfigRenderOptions} +import scala.collection.mutable + +object ShowConfig { + /** Helper function to show config only at `paths` */ + def apply(c: Config, paths: String): String = { + val groups = mutable.Stack.empty[String] + + def valueAt(path: String) = { + val fullPath = (groups :+ path.trim).mkString(".") + c.getValue(fullPath).render(opt) + } + + paths.linesIterator.map { + // empty or comment + case s if s.trim.isEmpty || s.trim.startsWith("#") => + s + // start a new group + case s if s.trim.endsWith("{") => + groups.push(s.trim.dropRight(1).trim) + s + // end prev group + case s if s.trim.startsWith("}") => + groups.pop() + s + // override + case s if s.contains(':') || s.contains('=') => + val Array(path, newValue) = s.split(Array(':', '=')) + val value = valueAt(path) + if (newValue.trim == value) s + else s"$s # Overridden. Old value = $value" + // normal path + case s => + val leadingSpaces = "\n" + s.takeWhile(_ == ' ') + val value = valueAt(s).linesIterator.mkString(leadingSpaces).trim + s"$s: $value" + }.mkString("\n") + } + + private val opt = ConfigRenderOptions.concise().setFormatted(true) +} diff --git a/utils/src/test/scala/com.olegych.scastie.util/ProcessActorTest.scala b/utils/src/test/scala/com.olegych.scastie.util/ProcessActorTest.scala index 6eb1a6478..3e21a1c37 100644 --- a/utils/src/test/scala/com.olegych.scastie.util/ProcessActorTest.scala +++ b/utils/src/test/scala/com.olegych.scastie.util/ProcessActorTest.scala @@ -45,15 +45,21 @@ class ProcessActorTest() extends TestKit(ActorSystem("ProcessActorTest")) with I } } - override def afterAll: Unit = { + override def afterAll(): Unit = { TestKit.shutdownActorSystem(system) } } class ProcessReceiver(command: String, probe: ActorRef) extends Actor { - private val props = - ProcessActor.props(command = List("bash", "-c", command.replace("\\", "/")), killOnExit = false) - private val process = context.actorOf(props, name = "process-receiver") + import akka.actor.typed.scaladsl.adapter._ + + private val process = context.spawn( + ProcessActor( + replyTo = self.toTyped[ProcessOutput], + command = List("bash", "-c", command.replace("\\", "/")) + ), + name = "process-receiver" + ) override def receive: Receive = { case output: ProcessOutput => probe ! 
output From 20620512551c71f4916f4888f3eb303565e924ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Tue, 10 Aug 2021 15:53:44 +0700 Subject: [PATCH 22/42] Add PlayJson (de)serializer test --- .../PlayJacksonTestBase.scala | 32 ++++++++++++++ .../PlayJsonDeserializerTest.scala | 43 +++++++++++++++++++ .../PlayJsonSerializerTest.scala | 40 +++++++++++++++++ 3 files changed, 115 insertions(+) create mode 100644 utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala create mode 100644 utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala create mode 100644 utils/src/test/scala/com.olegych.scastie.util/PlayJsonSerializerTest.scala diff --git a/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala b/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala new file mode 100644 index 000000000..c3bb1695c --- /dev/null +++ b/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala @@ -0,0 +1,32 @@ +package com.olegych.scastie.util + +import com.fasterxml.jackson.databind.Module +import com.fasterxml.jackson.databind.json.JsonMapper +import com.fasterxml.jackson.databind.module.SimpleModule +import com.olegych.scastie.api.{BaseInputs, Project, ScalaTarget, ShortInputs} +import play.api.libs.json.{Reads, Writes} + +import scala.reflect.ClassTag + +object PlayJacksonTestBase { + def mapper(modules: Module*): JsonMapper = + modules.foldLeft(JsonMapper.builder()){ + (b, m) => b.addModule(m) + }.build() + + def serializerModule[T: ClassTag: Writes]: SimpleModule = { + val cls = implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]] + new SimpleModule().addSerializer(new PlayJsonSerializer(cls)) + } + + def deserializerModule[T: ClassTag: Reads]: SimpleModule = { + val cls = implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]] + new SimpleModule().addDeserializer(cls, new PlayJsonDeserializer(cls)) + } + + val inputs: BaseInputs = ShortInputs("code", ScalaTarget.Scala3("3.0.1")) + val jsonInputs = """{"code":"code","target":{"dottyVersion":"3.0.1","tpe":"Scala3"}}""" + + val project: Project = Project("org", "repo", Some("logo"), List("art1")) + val jsonProject = """{"organization":"org","repository":"repo","logo":"logo","artifacts":["art1"]}""" +} diff --git a/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala b/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala new file mode 100644 index 000000000..2f67e3321 --- /dev/null +++ b/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala @@ -0,0 +1,43 @@ +package com.olegych.scastie.util + +import com.fasterxml.jackson.databind.exc.InvalidDefinitionException +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import com.olegych.scastie.api.{Project, ScalaTarget, ShortInputs} +import com.olegych.scastie.util.PlayJacksonTestBase._ +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.must.Matchers + +class PlayJsonDeserializerTest extends AnyFunSuite with Matchers { + test("plain jackson/ simple case class !fail!") { + assertThrows[InvalidDefinitionException] { + mapper().readValue(jsonInputs, classOf[ShortInputs]) + } + } + + test("DefaultScalaModule/ simple case class") { + mapper(DefaultScalaModule) + .readValue(jsonProject, classOf[Project]) mustBe project + } + + test("DefaultScalaModule/ complex class !fail!") { + assertThrows[InvalidDefinitionException] { + mapper(DefaultScalaModule) + .readValue(jsonInputs, 
classOf[ShortInputs]) + } + } + + test("PlayJson/ simple case class") { + mapper(deserializerModule[Project]) + .readValue(jsonProject, classOf[Project]) mustBe project + } + + test("PlayJson/ complex class success. Don't need Serializer for inner field type (ScalaTarget)") { + mapper(deserializerModule[ShortInputs]) + .readValue(jsonInputs, classOf[ShortInputs]) mustBe inputs + } + + test("PlayJson for one field (ScalaTarget) & DefaultScalaModule for outer object (ShortInputs)") { + mapper(deserializerModule[ScalaTarget], DefaultScalaModule) + .readValue(jsonInputs, classOf[ShortInputs]) mustBe inputs + } +} diff --git a/utils/src/test/scala/com.olegych.scastie.util/PlayJsonSerializerTest.scala b/utils/src/test/scala/com.olegych.scastie.util/PlayJsonSerializerTest.scala new file mode 100644 index 000000000..6fd8c3b64 --- /dev/null +++ b/utils/src/test/scala/com.olegych.scastie.util/PlayJsonSerializerTest.scala @@ -0,0 +1,40 @@ +package com.olegych.scastie.util + +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import com.olegych.scastie.api.{Project, ScalaTarget, ShortInputs} +import com.olegych.scastie.util.PlayJacksonTestBase._ +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.must.Matchers + +class PlayJsonSerializerTest extends AnyFunSuite with Matchers { + test("plain jackson/ simple case class !fail!") { + val ret = mapper().writeValueAsString(inputs) + ret must not be jsonInputs + ret mustBe "{}" + } + + test("DefaultScalaModule/ simple case class") { + mapper(DefaultScalaModule).writeValueAsString(project) mustBe jsonProject + } + + test("DefaultScalaModule/ complex class !fail!") { + val ret = mapper().writeValueAsString(inputs) + ret must not be jsonInputs + ret mustBe "{}" + } + + test("PlayJson/ simple case class") { + mapper(serializerModule[Project]) + .writeValueAsString(project) mustBe jsonProject + } + + test("PlayJson/ complex class success. 
Don't need Serializer for inner field type (ScalaTarget)") {
+    mapper(serializerModule[ShortInputs])
+      .writeValueAsString(inputs) mustBe jsonInputs
+  }
+
+  test("PlayJson for one field (ScalaTarget) & DefaultScalaModule for outer object (ShortInputs)") {
+    mapper(serializerModule[ScalaTarget], DefaultScalaModule)
+      .writeValueAsString(inputs) mustBe jsonInputs
+  }
+}

From acc5cff6a393a81580fab81273cc292d8a3d4ad8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?=
Date: Tue, 10 Aug 2021 16:29:34 +0700
Subject: [PATCH 23/42] Add a special test case for PlayJsonDeserializer

---
 .../PlayJacksonTestBase.scala                | 18 +++++++++++++++++-
 .../PlayJsonDeserializerTest.scala           |  6 ++++++
 2 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala b/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala
index c3bb1695c..d261aef79 100644
--- a/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala
+++ b/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala
@@ -4,7 +4,7 @@ import com.fasterxml.jackson.databind.Module
 import com.fasterxml.jackson.databind.json.JsonMapper
 import com.fasterxml.jackson.databind.module.SimpleModule
 import com.olegych.scastie.api.{BaseInputs, Project, ScalaTarget, ShortInputs}
-import play.api.libs.json.{Reads, Writes}
+import play.api.libs.json.{Format, Json, Reads, Writes}
 
 import scala.reflect.ClassTag
 
@@ -29,4 +29,20 @@ object PlayJacksonTestBase {
 
   val project: Project = Project("org", "repo", Some("logo"), List("art1"))
   val jsonProject = """{"organization":"org","repository":"repo","logo":"logo","artifacts":["art1"]}"""
+
+  /** A class that has another field after `target`.
+    * This is used to verify that, when deserializing the following input source:{{{
+    *   {"target":{...},"code":"some code"}
+    *                  ^
+    * }}}
+    * using PlayJsonDeserializer for ScalaTarget and another deserializer for Input2 and the other fields,
+    * PlayJsonDeserializer does NOT consume the input source beyond the ending '}' location
+    * of the `target` field.
+    */
+  case class Input2(target: ScalaTarget, code: String)
+  object Input2 {
+    implicit val format: Format[Input2] = Json.format[Input2]
+  }
+  val input2: Input2 = Input2(ScalaTarget.Scala3("3.0.1"), "code")
+  val jsonInput2 = """{"target":{"dottyVersion":"3.0.1","tpe":"Scala3"},"code":"code"}"""
 }
diff --git a/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala b/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala
index 2f67e3321..413511a6a 100644
--- a/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala
+++ b/utils/src/test/scala/com.olegych.scastie.util/PlayJsonDeserializerTest.scala
@@ -40,4 +40,10 @@ class PlayJsonDeserializerTest extends AnyFunSuite with Matchers {
     mapper(deserializerModule[ScalaTarget], DefaultScalaModule)
       .readValue(jsonInputs, classOf[ShortInputs]) mustBe inputs
   }
+
+  // see the comments on class [[Input2]]
+  test("PlayJson/ deserializer doesn't consume input source beyond the ending '}' location of the registered field's class") {
+    mapper(deserializerModule[ScalaTarget], DefaultScalaModule)
+      .readValue(jsonInput2, classOf[Input2]) mustBe input2
+  }
 }

From 8c204aa22c6e4fe097235e5b801ea71a56026afe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?=
Date: Tue, 10 Aug 2021 16:34:22 +0700
Subject: [PATCH 24/42] Use play-json's JsValueDeserializer

The previous JsValueDeserializer
implementation is working but it's a bit hacky :D
---
 .../util/PlayJackson.scala                   | 41 +++----------------
 .../json/jackson/PlayJsonDeserializer.scala  | 17 ++++++++
 .../PlayJacksonTestBase.scala                |  1 +
 3 files changed, 23 insertions(+), 36 deletions(-)
 create mode 100644 utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala

diff --git a/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala b/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala
index 09a84feb3..d414f841c 100644
--- a/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala
+++ b/utils/src/main/scala/com.olegych.scastie/util/PlayJackson.scala
@@ -1,14 +1,12 @@
 package com.olegych.scastie.util
 
-import com.fasterxml.jackson.core.{JsonGenerator, JsonLocation, JsonParseException, JsonParser}
-import com.fasterxml.jackson.databind.{DeserializationContext, SerializerProvider}
-import com.fasterxml.jackson.databind.deser.std.StdDeserializer
+import com.fasterxml.jackson.core.JsonGenerator
+import com.fasterxml.jackson.databind.SerializerProvider
 import com.fasterxml.jackson.databind.module.SimpleModule
 import com.fasterxml.jackson.databind.ser.std.StdSerializer
-import com.olegych.scastie.api.{FetchResult, FormatRequest, FormatResponse, Inputs, SnippetId, SnippetProgress, SnippetSummary}
-import play.api.libs.json.{Format, Json, Reads, Writes}
-
-import java.io.ByteArrayInputStream
+import com.olegych.scastie.api._
+import play.api.libs.json.jackson.PlayJsonDeserializer
+import play.api.libs.json.{Format, Json, Writes}
 import scala.reflect.ClassTag
 
 class PlayJsonSerializer[T: Writes](cls: Class[T]) extends StdSerializer[T](cls) {
@@ -16,35 +14,6 @@ class PlayJsonSerializer[T: Writes](cls: Class[T]) extends StdSerializer[T](cls)
     gen.writeRawValue(Json.stringify(Json.toJson(value)))
 }
 
-class PlayJsonDeserializer[T: Reads](cls: Class[T]) extends StdDeserializer[T](cls) {
-  override def deserialize(p: JsonParser, ctxt: DeserializationContext): T = {
-    def error() = throw new JsonParseException(p, "not support")
-
-    p.getTokenLocation match { // current token is '{'
-      case JsonLocation.NA => error()
-
-      case loc => loc.getSourceRef match {
-        case s: String =>
-          val begin = loc.getCharOffset.toInt
-          p.skipChildren() // current token is '}'
-          val end = p.getCurrentLocation.getCharOffset.toInt
-          Json.parse(s.substring(begin, end)).as[T]
-
-        case bytes: Array[Byte] =>
-          val begin = loc.getByteOffset.toInt
-          p.skipChildren() // current token is '}'
-          val end = p.getCurrentLocation.getByteOffset.toInt
-          val in = new ByteArrayInputStream(bytes, begin, end - begin)
-          Json.parse(in).as[T]
-
-        // don't need support for other cases
-        // Find usage of [[com.fasterxml.jackson.core.JsonFactory._createContext]] for all cases
-        case _ => error()
-      }
-    }
-  }
-}
-
 class PlayJackson extends SimpleModule {
   private def add[T: ClassTag: Format] = {
     val cls = implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]]
diff --git a/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala b/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala
new file mode 100644
index 000000000..4b95be6b7
--- /dev/null
+++ b/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala
@@ -0,0 +1,17 @@
+package play.api.libs.json.jackson
+
+import com.fasterxml.jackson.core.JsonParser
+import com.fasterxml.jackson.databind.DeserializationContext
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer
+import play.api.libs.json.{JsObject, JsonParserSettings, Reads}
+
+class PlayJsonDeserializer[T: Reads](cls: Class[T]) extends StdDeserializer[T](cls) { + override def deserialize(p: JsonParser, ctxt: DeserializationContext): T = { + val der = new JsValueDeserializer( + ctxt.getTypeFactory, + classOf[JsObject], + JsonParserSettings.settings + ) + der.deserialize(p, ctxt).as[T] + } +} diff --git a/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala b/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala index d261aef79..f3ca56272 100644 --- a/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala +++ b/utils/src/test/scala/com.olegych.scastie.util/PlayJacksonTestBase.scala @@ -4,6 +4,7 @@ import com.fasterxml.jackson.databind.Module import com.fasterxml.jackson.databind.json.JsonMapper import com.fasterxml.jackson.databind.module.SimpleModule import com.olegych.scastie.api.{BaseInputs, Project, ScalaTarget, ShortInputs} +import play.api.libs.json.jackson.PlayJsonDeserializer import play.api.libs.json.{Format, Json, Reads, Writes} import scala.reflect.ClassTag From 5e3dacf2a8c470e787f807723fe9a86a19e22e27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Tue, 10 Aug 2021 17:11:19 +0700 Subject: [PATCH 25/42] Copy code from play-json's JsValueDeserializer before modifying --- .../json/jackson/PlayJsonDeserializer.scala | 100 ++++++++++++++++-- 1 file changed, 91 insertions(+), 9 deletions(-) diff --git a/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala b/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala index 4b95be6b7..e71110459 100644 --- a/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala +++ b/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala @@ -1,17 +1,99 @@ package play.api.libs.json.jackson -import com.fasterxml.jackson.core.JsonParser +import com.fasterxml.jackson.core.{JsonParser, JsonTokenId} import com.fasterxml.jackson.databind.DeserializationContext import com.fasterxml.jackson.databind.deser.std.StdDeserializer -import play.api.libs.json.{JsObject, JsonParserSettings, Reads} +import play.api.libs.json._ +import JsonParserSettings.{settings => parserSettings} +import scala.annotation.{switch, tailrec} +import scala.collection.mutable.{ArrayBuffer, ListBuffer} class PlayJsonDeserializer[T: Reads](cls: Class[T]) extends StdDeserializer[T](cls) { - override def deserialize(p: JsonParser, ctxt: DeserializationContext): T = { - val der = new JsValueDeserializer( - ctxt.getTypeFactory, - classOf[JsObject], - JsonParserSettings.settings - ) - der.deserialize(p, ctxt).as[T] + override def deserialize(p: JsonParser, ctxt: DeserializationContext): T = + deserialize(p, ctxt, List()).as[T] + + // copy from play-json 2.10.0-RC5 / play.api.libs.json.jackson.JsValueDeserializer.parseBigDecimal + private def parseBigDecimal( + jp: JsonParser, + parserContext: List[DeserializerContext] + ): (Some[JsNumber], List[DeserializerContext]) = { + BigDecimalParser.parse(jp.getText, parserSettings) match { + case JsSuccess(bigDecimal, _) => + (Some(JsNumber(bigDecimal)), parserContext) + + case JsError((_, JsonValidationError("error.expected.numberdigitlimit" +: _) +: _) +: _) => + throw new IllegalArgumentException(s"Number is larger than supported for field '${jp.currentName}'") + + case JsError((_, JsonValidationError("error.expected.numberscalelimit" +: _, args @ _*) +: _) +: _) => + val scale = args.headOption.fold("")(scale => s" ($scale)") + throw new 
IllegalArgumentException(s"Number scale$scale is out of limits for field '${jp.currentName}'") + + case JsError((_, JsonValidationError("error.expected.numberformatexception" +: _) +: _) +: _) => + throw new NumberFormatException + + case JsError(errors) => + throw JsResultException(errors) + } + } + + // copy from play-json 2.10.0-RC5 / play.api.libs.json.jackson.JsValueDeserializer.parseBigDecimal + @tailrec + final def deserialize( + jp: JsonParser, + ctxt: DeserializationContext, + parserContext: List[DeserializerContext] + ): JsValue = { + if (jp.getCurrentToken == null) { + jp.nextToken() // happens when using treeToValue (we're not parsing tokens) + } + + val valueAndCtx = (jp.getCurrentToken.id(): @switch) match { + case JsonTokenId.ID_NUMBER_INT | JsonTokenId.ID_NUMBER_FLOAT => parseBigDecimal(jp, parserContext) + + case JsonTokenId.ID_STRING => (Some(JsString(jp.getText)), parserContext) + + case JsonTokenId.ID_TRUE => (Some(JsBoolean(true)), parserContext) + + case JsonTokenId.ID_FALSE => (Some(JsBoolean(false)), parserContext) + + case JsonTokenId.ID_NULL => (Some(JsNull), parserContext) + + case JsonTokenId.ID_START_ARRAY => (None, ReadingList(ArrayBuffer()) +: parserContext) + + case JsonTokenId.ID_END_ARRAY => + parserContext match { + case ReadingList(content) :: stack => (Some(JsArray(content)), stack) + case _ => throw new RuntimeException("We should have been reading list, something got wrong") + } + + case JsonTokenId.ID_START_OBJECT => (None, ReadingMap(ListBuffer()) +: parserContext) + + case JsonTokenId.ID_FIELD_NAME => + parserContext match { + case (c: ReadingMap) :: stack => (None, c.setField(jp.getCurrentName) +: stack) + case _ => throw new RuntimeException("We should be reading map, something got wrong") + } + + case JsonTokenId.ID_END_OBJECT => + parserContext match { + case ReadingMap(content) :: stack => (Some(JsObject(content)), stack) + case _ => throw new RuntimeException("We should have been reading an object, something got wrong") + } + + case JsonTokenId.ID_NOT_AVAILABLE => + throw new RuntimeException("We should have been reading an object, something got wrong") + + case JsonTokenId.ID_EMBEDDED_OBJECT => + throw new RuntimeException("We should have been reading an object, something got wrong") + } + + // Read ahead + jp.nextToken() + + valueAndCtx match { + case (Some(v), Nil) => v // done, no more tokens and got a value! 
+ case (Some(v), previous :: stack) => deserialize(jp, ctxt, previous.addValue(v) :: stack) + case (None, nextContext) => deserialize(jp, ctxt, nextContext) + } } } From ca0ac20ed18b27e3f98222dd35d76defa7095d38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Tue, 10 Aug 2021 17:14:53 +0700 Subject: [PATCH 26/42] PlayJsonDeserializer: Don't consume all input source Don't consume input source beyond the ending '}' location of the parsing JsObject --- .../libs/json/jackson/PlayJsonDeserializer.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala b/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala index e71110459..d6927ac71 100644 --- a/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala +++ b/utils/src/main/scala/play/api/libs/json/jackson/PlayJsonDeserializer.scala @@ -37,6 +37,8 @@ class PlayJsonDeserializer[T: Reads](cls: Class[T]) extends StdDeserializer[T](c } // copy from play-json 2.10.0-RC5 / play.api.libs.json.jackson.JsValueDeserializer.parseBigDecimal + // with minimal modifying: + // Don't consume input source beyond the ending '}' location of the parsing JsObject @tailrec final def deserialize( jp: JsonParser, @@ -87,13 +89,14 @@ class PlayJsonDeserializer[T: Reads](cls: Class[T]) extends StdDeserializer[T](c throw new RuntimeException("We should have been reading an object, something got wrong") } - // Read ahead - jp.nextToken() - valueAndCtx match { case (Some(v), Nil) => v // done, no more tokens and got a value! - case (Some(v), previous :: stack) => deserialize(jp, ctxt, previous.addValue(v) :: stack) - case (None, nextContext) => deserialize(jp, ctxt, nextContext) + case (Some(v), previous :: stack) => + jp.nextToken() // Read ahead + deserialize(jp, ctxt, previous.addValue(v) :: stack) + case (None, nextContext) => + jp.nextToken() // Read ahead + deserialize(jp, ctxt, nextContext) } } } From 7bc8df6cc931645375679d82923cc0d947bba4fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Thu, 12 Aug 2021 13:22:23 +0700 Subject: [PATCH 27/42] logback.xml use ASYNC appender as recommended by akka --- deployment/logback.xml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/deployment/logback.xml b/deployment/logback.xml index 7b56b8f96..a99315ca1 100644 --- a/deployment/logback.xml +++ b/deployment/logback.xml @@ -25,6 +25,12 @@ + + 8192 + true + + + WARN @@ -35,7 +41,7 @@ - + From 741aedce1c15ca74f38e247bf3a603c9254c8c26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 9 Aug 2021 22:20:07 +0700 Subject: [PATCH 28/42] Add ShowConfigTest --- .../ShowConfigTest.scala | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala diff --git a/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala b/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala new file mode 100644 index 000000000..632a4f3b6 --- /dev/null +++ b/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala @@ -0,0 +1,59 @@ +package com.olegych.scastie.util + +import com.typesafe.config.ConfigFactory +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.must.Matchers + +class ShowConfigTest extends AnyFunSuite with Matchers { + private val config = ConfigFactory.parseString( + 
""" + |a: 1 + |b.c = "str" + |d: { + | d1 = 1s + | d2.x = ${a} + |} + |""".stripMargin).resolve() + + test("single simple path") { + ShowConfig(config, "a") mustBe "a: 1" + } + + test("complex path") { + ShowConfig(config, "d") mustBe + """d: { + | "d1" : "1s", + | "d2" : { + | "x" : 1 + | } + |}""".stripMargin + } + + test("with comments and line breaks") { + ShowConfig(config, + """ + | # comment + |b.c + | + |""".stripMargin) mustBe + """ + | # comment + |b.c: "str" + |""".stripMargin + } + + test("with group") { + ShowConfig(config, + """|d { + | d2.x + |} + |""".stripMargin) mustBe + """|d { + | d2.x: 1 + |}""".stripMargin + } + + test("with overridden") { + ShowConfig(config, "d.d2.x = 2") mustBe "d.d2.x = 2 # Overridden. Old value = 1" + } +} From 6fef919156aa872fe6dfa6fe19ed5d4dc572d1d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Wed, 28 Jul 2021 23:54:40 +0700 Subject: [PATCH 29/42] sbt: Remove sbt-assembly, use adoptopenjdk and add server/docker --- build.sbt | 43 ++------- project/DockerHelper.scala | 183 +++++++++++++++++++------------------ project/plugins.sbt | 1 - 3 files changed, 102 insertions(+), 125 deletions(-) diff --git a/build.sbt b/build.sbt index 5c8a49511..b3e082f1a 100644 --- a/build.sbt +++ b/build.sbt @@ -1,5 +1,6 @@ import SbtShared._ import com.typesafe.sbt.SbtNativePackager.Universal +import DockerHelper.{serverDockerfile, runnerDockerfile} def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % ( if(module.startsWith("http")) "10.2.5" else "2.6.15" @@ -68,7 +69,6 @@ lazy val runnerRuntimeDependencies = (api.projectRefs ++ runtimeScala.projectRef )).map(_ / publishLocal) lazy val runnerRuntimeDependenciesInTest = Seq( - assembly / test := {}, Test / test := (Test / test).dependsOn(runnerRuntimeDependencies: _*).value, Test / testOnly := (Test / testOnly).dependsOn(runnerRuntimeDependencies: _*).evaluated, Test / testQuick := (Test / testQuick).dependsOn(runnerRuntimeDependencies: _*).evaluated @@ -83,15 +83,12 @@ lazy val smallRunnerRuntimeDependenciesInTest = { sbtScastie ).map(_ / publishLocal) Seq( - assembly / test := {}, Test / test := (Test / test).dependsOn(smallRunnerRuntimeDependencies: _*).value, Test / testOnly := (Test / testOnly).dependsOn(smallRunnerRuntimeDependencies: _*).evaluated, Test / testQuick := (Test / testQuick).dependsOn(smallRunnerRuntimeDependencies: _*).evaluated ) } -lazy val dockerOrg = "scalacenter" - lazy val sbtRunner = project .in(file("sbt-runner")) .settings(baseNoCrossSettings) @@ -106,37 +103,11 @@ lazy val sbtRunner = project akka("actor-testkit-typed") % Test, "org.scalameta" %% "scalafmt-core" % "3.0.0-RC6" ), - docker / imageNames := Seq( - ImageName( - namespace = Some(dockerOrg), - repository = "scastie-sbt-runner", - tag = Some(gitHashNow) - ) - ), - docker / dockerfile := Def - .task { - DockerHelper( - baseDirectory = (ThisBuild / baseDirectory).value.toPath, - sbtTargetDir = target.value.toPath, - ivyHome = ivyPaths.value.ivyHome.get.toPath, - organization = organization.value, - artifact = assembly.value.toPath, - sbtScastie = (sbtScastie / moduleName).value - ) - } - .dependsOn(runnerRuntimeDependencies: _*) - .value, - assembly / assemblyMergeStrategy := { - case PathList("META-INF", xs @ _*) => MergeStrategy.discard - case in @ PathList("reference.conf", xs @ _*) => { - val old = (assembly / assemblyMergeStrategy).value - old(in) - } - case x => MergeStrategy.first - } + dockerImageName := "scastie-sbt-runner", + docker / dockerfile := 
runnerDockerfile(sbtScastie).dependsOn(runnerRuntimeDependencies: _*).value, ) .dependsOn(api.jvm(ScalaVersions.jvm), instrumentation, utils) - .enablePlugins(sbtdocker.DockerPlugin, BuildInfoPlugin) + .enablePlugins(sbtdocker.DockerPlugin, JavaServerAppPackaging, BuildInfoPlugin) lazy val server = project .settings(baseNoCrossSettings) @@ -155,9 +126,11 @@ lazy val server = project "ch.megard" %% "akka-http-cors" % "0.4.2", akka("actor-testkit-typed") % Test, akka("http-testkit") % Test - ) + ), + dockerImageName := "scastie-server", + docker / dockerfile := serverDockerfile().value, ) - .enablePlugins(JavaServerAppPackaging) + .enablePlugins(sbtdocker.DockerPlugin, JavaServerAppPackaging) .dependsOn(api.jvm(ScalaVersions.jvm), utils, balancer) lazy val balancer = project diff --git a/project/DockerHelper.scala b/project/DockerHelper.scala index 5b9205740..8cd8df468 100644 --- a/project/DockerHelper.scala +++ b/project/DockerHelper.scala @@ -1,21 +1,82 @@ +import sbt._ +import sbt.Keys._ import SbtShared._ +import com.typesafe.sbt.SbtNativePackager +import com.typesafe.sbt.SbtNativePackager.Universal +import com.typesafe.sbt.packager.universal.UniversalPlugin +import com.typesafe.sbt.packager.Keys.{bashScriptExtraDefines, executableScriptName, stage} +import com.typesafe.sbt.packager.archetypes.scripts.BashStartScriptPlugin +import sbtdocker.DockerPlugin +import sbtdocker.DockerPlugin.autoImport.{ImageName, docker, imageNames} +import sbtdocker.immutable.Dockerfile +import sbtdocker.Instructions.Run + +object DockerHelper extends AutoPlugin { + override def requires = SbtNativePackager && UniversalPlugin && DockerPlugin && BashStartScriptPlugin + override def trigger = allRequirements + object autoImport { + val dockerImageName = settingKey[String]("docker image name") + } + import autoImport._ + + private val dockerOrg = "scalacenter" + private val appDir = "/app" + private val username = "scastie" + private val uid = 433 + private val chown = s"$uid:$uid" + private val userHome = s"/home/$username" + + override lazy val projectSettings: Seq[Setting[_]] = Seq( + docker / imageNames := Seq( + ImageName( + namespace = Some(dockerOrg), + repository = dockerImageName.value, + tag = Some(gitHashNow) + ) + ), + // https://www.scala-sbt.org/sbt-native-packager/archetypes/cheatsheet.html#extra-defines + bashScriptExtraDefines += s"""addJava "-Dlogback.configurationFile=$appDir/conf/logback.xml"""", + Universal / mappings += ( + (ThisBuild / baseDirectory).value / "deployment" / "logback.xml" -> "conf/logback.xml" + ), + ) + + private def baseDockerfile(fromImg: String, stageDir: File): Dockerfile = + Dockerfile.empty + .from(fromImg) + .runRaw(s"""\\ + groupadd -g $uid $username && \\ + useradd -md $userHome -g $username -u $uid -s /bin/sh $username""") + .env( + "LANG" -> "en_US.UTF-8", + "HOME" -> userHome + ) + .copy(stageDir, appDir, chown) -import sbtdocker.DockerPlugin.autoImport._ + private def entrypoint = Def.setting { + s"$appDir/bin/${executableScriptName.value}" + } -import java.nio.file.Path + def serverDockerfile(): Def.Initialize[Task[Dockerfile]] = Def.task { + baseDockerfile("adoptopenjdk:8u292-b10-jre-hotspot", stage.value) + .user(username) + .workDir(appDir) + .env("DATA_DIR" -> s"$appDir/data") + .volume(s"$appDir/data") + .entryPoint(entrypoint.value) + } -object DockerHelper { - def apply(baseDirectory: Path, sbtTargetDir: Path, sbtScastie: String, ivyHome: Path, organization: String, artifact: Path): Dockerfile = { + def runnerDockerfile(sbtScastie: Project): 
Def.Initialize[Task[Dockerfile]] = Def.task { + val sbtTargetDir = target.value + val ivyHome = ivyPaths.value.ivyHome.get.toPath + val org = organization.value + val sbtScastieModuleName = (sbtScastie / moduleName).value - val artifactTargetPath = s"/app/${artifact.getFileName()}" - val generatedProjects = new GenerateProjects(sbtTargetDir) + val generatedProjects = new GenerateProjects(sbtTargetDir.toPath) generatedProjects.generateSbtProjects() - val logbackConfDestination = "/home/scastie/logback.xml" - - val ivyLocalTemp = sbtTargetDir.resolve("ivy") - - sbt.IO.delete(ivyLocalTemp.toFile) + val ivyLocalTemp = sbtTargetDir / "ivy" + sbt.IO.delete(ivyLocalTemp) /* sbt-scastie / scala_2.10 / sbt_0.13 / 0.25.0 @@ -23,12 +84,11 @@ object DockerHelper { 0 1 2 3 */ - CopyRecursively( - source = ivyHome.resolve(s"local/$organization"), - destination = ivyLocalTemp, + source = ivyHome.resolve(s"local/$org"), + destination = ivyLocalTemp.toPath, directoryFilter = { (dir, depth) => - lazy val isSbtScastiePath = dir.getName(0).toString == sbtScastie + lazy val isSbtScastiePath = dir.getName(0).toString == sbtScastieModuleName lazy val dirName = dir.getFileName.toString if (depth == 1) { @@ -41,80 +101,25 @@ object DockerHelper { } ) - val containerUsername = "sbtRunnerContainer" - - val sbtGlobal = sbtTargetDir.resolve(".sbt") - sbtGlobal.toFile.mkdirs() - - new Dockerfile { - from("openjdk:8u171-jdk-alpine") - - // Install ca-certificates for wget https - runRaw("apk update") - runRaw("apk --update upgrade") - runRaw("apk add ca-certificates") - runRaw("update-ca-certificates") - runRaw("apk add openssl") - runRaw("apk add nss") - runRaw("apk add bash") - - - runRaw("mkdir -p /app/sbt") - - runRaw( - s"wget https://github.com/sbt/sbt/releases/download/v${distSbtVersion}/sbt-${distSbtVersion}.tgz -O /tmp/sbt-${distSbtVersion}.tgz" - ) - runRaw(s"tar -xzvf /tmp/sbt-$distSbtVersion.tgz -C /app/sbt") - - runRaw("ln -s /app/sbt/sbt/bin/sbt /usr/local/bin/sbt") - - val userHome = s"/home/$containerUsername" - - runRaw(s"addgroup -g 433 $containerUsername") - runRaw( - s"adduser -h $userHome -G $containerUsername -D -u 433 -s /bin/sh $containerUsername" - ) - - def chown(dir: String) = { - user("root") - runRaw(s"chown -R $containerUsername:$containerUsername $userHome/$dir") - user(containerUsername) - } - - add(sbtGlobal.toFile, s"$userHome/.sbt") - chown(".sbt") - - user(containerUsername) - workDir(userHome) - env("LANG", "en_US.UTF-8") - env("HOME", userHome) - - val dest = s"$userHome/projects" - add(generatedProjects.projectTarget.toFile, dest) - chown("projects") - - add(ivyLocalTemp.toFile, s"$userHome/.ivy2/local/$organization") - chown(".ivy2") - - generatedProjects.projects.foreach( - generatedProject => runRaw(generatedProject.runCmd(dest)) - ) - - add(artifact.toFile, artifactTargetPath) - - add( - baseDirectory.resolve("deployment/logback.xml").toFile, - logbackConfDestination - ) - - entryPoint( - "java", - "-Xmx512M", - "-Xms512M", - s"-Dlogback.configurationFile=$logbackConfDestination", - "-jar", - artifactTargetPath + val dest = s"$userHome/projects" + + baseDockerfile("adoptopenjdk:8u292-b10-jdk-hotspot", stage.value) + .runRaw(s"""\\ + mkdir $appDir/sbt && \\ + curl -Lo /tmp/sbt-${distSbtVersion}.tgz \\ + https://github.com/sbt/sbt/releases/download/v${distSbtVersion}/sbt-${distSbtVersion}.tgz && \\ + tar -xzvf /tmp/sbt-$distSbtVersion.tgz -C $appDir/sbt && \\ + ln -s $appDir/sbt/sbt/bin/sbt /usr/local/bin/sbt && \\ + mkdir $userHome/.sbt && chown $chown $userHome/.sbt + """) + 
.user(username) + .workDir(userHome) + .copy(generatedProjects.projectTarget.toFile, dest, chown) + .copy(ivyLocalTemp, s"$userHome/.ivy2/local/$org", chown) + // comment out the `addInstructions` below to speed up the sbtRunner/docker task when testing deploy* tasks + .addInstructions( + generatedProjects.projects.map(p => Run(p.runCmd(dest))) ) - } + .entryPoint(entrypoint.value, "-J-Xmx512M", "-J-Xms512M") } } diff --git a/project/plugins.sbt b/project/plugins.sbt index a9e8e4917..8e1d4a0f7 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,4 +1,3 @@ -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0") addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.2") addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.8.2") From e3b13290b3816040fbda91e7359caa7235a65dca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Wed, 28 Jul 2021 23:52:25 +0700 Subject: [PATCH 30/42] sbt: Reimplement sbt Deployment code + deploy, deployServer, deployQuick, deployLocal are now command aliases instead of TaskKeys. + Add dockerCompose task + Run `server` via `docker run --network=host ...` instead of `java ...` + Fix: deployLocal fails if you don't have access to the scastie-secrets GitHub repo. + Also remove the unused RUNNER_PRODUCTION env variable --- build.sbt | 21 +- deployment/local.conf | 54 +- deployment/production.conf | 52 +- project/Deployment.scala | 738 +++++++++---------- project/DockerHelper.scala | 8 +- sbt-runner/src/main/resources/reference.conf | 1 - 6 files changed, 455 insertions(+), 419 deletions(-) diff --git a/build.sbt b/build.sbt index b3e082f1a..d3c66a7aa 100644 --- a/build.sbt +++ b/build.sbt @@ -1,5 +1,4 @@ import SbtShared._ -import com.typesafe.sbt.SbtNativePackager.Universal import DockerHelper.{serverDockerfile, runnerDockerfile} def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % ( @@ -9,6 +8,17 @@ def akka(module: String) = "com.typesafe.akka" %% ("akka-" + module) % ( addCommandAlias("startAll", "sbtRunner/reStart;server/reStart;client/fastOptJS/startWebpackDevServer") addCommandAlias("startAllProd", "sbtRunner/reStart;server/fullOptJS/reStart") +addCommandAlias("fullBuildServer", "client/Compile/fullOptJS/webpack;server/docker") +addCommandAlias("deployLocal", "fullBuildServer;sbtRunner/docker;deployLocalQuick") + +// Deploy server and sbt instances without building and pushing docker images +addCommandAlias("deployQuick", "deployRunnersQuick;deployServerQuick") +addCommandAlias("deployRunners", "sbtRunner/dockerBuildAndPush;deployRunnersQuick") +addCommandAlias("fullBuildAndPushServer", "client/Compile/fullOptJS/webpack;server/dockerBuildAndPush") +addCommandAlias("deployServer", "fullBuildAndPushServer;deployServerQuick") +// Deploy server and sbt instances +addCommandAlias("deploy", "sbtRunner/dockerBuildAndPush;fullBuildAndPushServer;deployQuick") + lazy val scastie = project .in(file(".")) .aggregate( @@ -26,9 +36,10 @@ lazy val scastie = project .settings(baseSettings) .settings( cachedCiTestFull := { - val _ = cachedCiTestFull.value - val __ = (sbtRunner / docker / dockerfile).value - val ___ = (server / Universal / packageBin).value + cachedCiTestFull.value + (sbtRunner / docker / dockerfile).value + (client / Compile / fullOptJS / webpack).value + (server / docker / dockerfile).value }, ) .settings(Deployment.settings(server, sbtRunner)) @@ -117,7 +128,7 @@ lazy val server = project Compile / products += (client / Compile / npmUpdate / 
crossTarget).value / "out", reStart := reStart.dependsOn(client / Compile / fastOptJS / webpack).evaluated, fullOptJS / reStart := reStart.dependsOn(client / Compile / fullOptJS / webpack).evaluated, - Universal / packageBin := (Universal / packageBin).dependsOn(client / Compile / fullOptJS / webpack).value, + // Universal / stage := (Universal / stage).dependsOn(client / Compile / fullOptJS / webpack).value, reStart / javaOptions += "-Xmx512m", maintainer := "scalacenter", libraryDependencies ++= Seq( diff --git a/deployment/local.conf b/deployment/local.conf index 833e0a537..7a44c76a0 100644 --- a/deployment/local.conf +++ b/deployment/local.conf @@ -1,32 +1,40 @@ include "secrets.conf" -com.olegych.scastie { - balancer { - remote-hostname = "127.0.0.1" - - remote-sbt-ports-start = 5150 - remote-sbt-ports-size = 1 - - snippets-dir = snippets - old-snippets-dir = old-snippets +# configs in `deploy-config` are used only to run the deploy* sbt tasks defined in project/Deployment.scala +# and are not used at runtime by sbtRunner or server +com.olegych.scastie.deploy-config { + # passed to the `docker run --network` option + # values: host or a user-defined bridge network name + # @note the host network is not supported on Mac/Windows: https://docs.docker.com/network/host/ + # @note the default `bridge` network doesn't work because we need to connect node-to-node by container name, + # which is not supported in the default bridge network + # @see https://docs.docker.com/network/network-tutorial-standalone/#use-user-defined-bridge-networks + network: scastie + sbt-runners { + # first scastie sbt runner docker container name, specified in project/Deployment.scala + host = "scastie-runner-1" + ports-start = 5150 + ports-size = 1 } - web { - production = true - oauth2 { - users-file = users.txt - sessions-file = sessions.json - } + server { + host: ${akka.remote.artery.canonical.hostname} + port: ${akka.remote.artery.canonical.port} + data-mounts: [ "data:/app/data" ] } } -com.olegych.scastie.web { - hostname = "127.0.0.1" - akka-port = 15000 +com.olegych.scastie { + web { + production = true + bind.port = 9000 + } } -akka.remote.netty.tcp { - bind-hostname = "127.0.0.1" - bind-port = 15000 - hostname = "127.0.0.1" - port = 15000 +akka.remote.artery { + canonical { + # scastie server docker container name, specified in project/Deployment.scala + hostname = "scastie-server" + port = 15000 + } + bind.hostname = "0.0.0.0" # bind in docker } diff --git a/deployment/production.conf b/deployment/production.conf index 031752f51..1e039138e 100644 --- a/deployment/production.conf +++ b/deployment/production.conf @@ -1,5 +1,39 @@ include "secrets.conf" +# configs in `deploy-config` are used only to run the deploy* sbt tasks defined in project/Deployment.scala +# and are not used at runtime by sbtRunner or server +com.olegych.scastie.deploy-config { + # passed to the `docker run --network` option + # values: host or a user-defined bridge network name + # @note the host network is not supported on Mac/Windows: https://docs.docker.com/network/host/ + # @note the default `bridge` network doesn't work because we need to connect node-to-node by container name, + # which is not supported in the default bridge network + # @see https://docs.docker.com/network/network-tutorial-standalone/#use-user-defined-bridge-networks + network: host + sbt-runners { + // used to ssh to the runner host from the `server` machine + user: "scastie" + host = "scastie-sbt.scala-lang.org" + ports-start = 5150 + ports-size = 6 + } + server { + // used to ssh to the server from the local machine + user: 
"scastie" + host: ${akka.remote.artery.canonical.hostname} + port: ${akka.remote.artery.canonical.port} + + # absolute or relative to /home/scastie which is the folder when we rsync files to server + # ex: "users.txt:foo" == "/home/scastie/users.txt:foo" + data-mounts: [ + # com.olegych.scastie.web.oauth2.users-file + "users.txt:/app/data/users.txt", + # com.olegych.scastie.web.oauth2.sessions-file + "sessions.json:/app/data/sessions.json", + ] + } +} + com.olegych.scastie { balancer { snippets-container = mongo @@ -9,29 +43,13 @@ com.olegych.scastie { // sbt = n X (1024M (sbt process) + 512M (actor)) // 3 * (1200 + 512 + 512) + 3 * (1024 + 512) = 11 280 - - remote-hostname = "scastie-sbt.scala-lang.org" - - remote-sbt-ports-start = 5150 - remote-sbt-ports-size = 6 } web { production = true - oauth2 { - users-file = /home/scastie/users.txt - sessions-file = /home/scastie/sessions.json - } } } -com.olegych.scastie.web { - hostname = "scastie.scala-lang.org" - akka-port = 15000 -} - -akka.remote.netty.tcp { - bind-hostname = "scastie.scala-lang.org" - bind-port = 15000 +akka.remote.artery.canonical { hostname = "scastie.scala-lang.org" port = 15000 } diff --git a/project/Deployment.scala b/project/Deployment.scala index 58a0da320..afcbe19b4 100644 --- a/project/Deployment.scala +++ b/project/Deployment.scala @@ -1,432 +1,430 @@ import sbt._ import Keys._ -import SbtShared.gitHashNow - import java.io.File import java.nio.file._ import java.nio.file.attribute._ -import java.nio.file.StandardCopyOption.REPLACE_EXISTING - import com.typesafe.config.ConfigFactory -import com.typesafe.sbt.SbtNativePackager.Universal -import sbtdocker.DockerKeys.{docker, dockerBuildAndPush, imageNames} +import sbtdocker.DockerKeys.{docker, imageNames} +import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} +import com.typesafe.sslconfig.util.ConfigLoader.playConfigLoader import sbtdocker.ImageName +import scala.collection.immutable.Seq +import scala.language.implicitConversions import sys.process._ object Deployment { - def settings(server: Project, sbtRunner: Project): Seq[Def.Setting[Task[Unit]]] = Seq( - deploy := deployTask(server, sbtRunner).value, - deployServer := deployServerTask(server, sbtRunner).value, - deployQuick := deployQuickTask(server, sbtRunner).value, + def settings(server: Project, sbtRunner: Project): Seq[Def.Setting[_]] = Seq( + deployRunnersQuick := deployRunnersQuickTask(server, sbtRunner).value, deployServerQuick := deployServerQuickTask(server, sbtRunner).value, - deployLocal := deployLocalTask(server, sbtRunner).value + deployLocalQuick := deployLocalQuickTask(server, sbtRunner).value, + dockerCompose := dockerComposeTask(server, sbtRunner).value, ) - lazy val deploy = taskKey[Unit]("Deploy server and sbt instances") - - lazy val deployServer = taskKey[Unit]("Deploy server") - - lazy val deployLocal = taskKey[Unit]("Deploy locally") - - lazy val deployQuick = taskKey[Unit]( - "Deploy server and sbt instances without building server " + - "zip and pushing docker images" + lazy val deployRunnersQuick = taskKey[Unit]("Deploy sbt runners") + lazy val deployServerQuick = taskKey[Unit]("Deploy server without building server zip") + lazy val deployLocalQuick = taskKey[Unit]("Deploy locally") + lazy val dockerCompose = taskKey[Unit]( + "Create docker-compose.yml (alternative way to deploy locally)" ) - lazy val deployServerQuick = - taskKey[Unit]("Deploy server without building server zip") - - def deployServerTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = + 
/** + * @note The generated docker-compose.yml file contains all options to run scastie: + * - Don't mount secrets.conf and local.conf file to container + * - Don't set -Dsentry.dsn, -Dconfig.file `docker run` options + */ + private def dockerComposeTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = Def.task { - val deployment = deploymentTask(sbtRunner).value - val serverZip = (server / Universal / packageBin).value.toPath - - deployment.deployServer(serverZip) + val log = streams.value.log + val baseDir = (ThisBuild / baseDirectory).value + + Deployer.Local(baseDir, log).write("docker-compose.yml", + s""" + |# https://www.cloudsavvyit.com/10765/how-to-simplify-docker-compose-files-with-yaml-anchors-and-extensions/ + |x-runner: &runner + | image: ${ (sbtRunner / docker / imageNames).value.head } + | environment: + | JAVA_OPTS: >- + | -Dsentry.release=${ version.value } + | -Dcom.olegych.scastie.sbt.production=true + | -Dakka.cluster.seed-nodes.0=akka://sys@server:15000 + | -Dakka.cluster.seed-nodes.1=akka://sys@sbt-runner-1:5150 + | restart: unless-stopped + |services: + | server: + | image: ${ (server / docker / imageNames).value.head } + | ports: + | - "9000:9000" + | environment: + | JAVA_OPTS: >- + | -Xmx1G + | -Dsentry.release=${ version.value } + | -Dcom.olegych.scastie.web.production=true + | -Dakka.remote.artery.canonical.hostname=server + | -Dakka.remote.artery.canonical.port=15000 + | -Dakka.cluster.seed-nodes.0=akka://sys@server:15000 + | -Dakka.cluster.seed-nodes.1=akka://sys@sbt-runner-1:5150 + | volumes: + | # /app/data = value of DATA_DIR env variable defined in DockerHelper + | - ./target:/app/data + | restart: unless-stopped + | sbt-runner-1: + | <<: *runner + | command: + | - -Dakka.remote.artery.canonical.hostname=sbt-runner-1 + | - -Dakka.remote.artery.canonical.port=5150 + |""".stripMargin) + + log.info("Created docker-compose.yml. 
`docker-compose up` to start scastie.") } - def deployLocalTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = - Def.task { - val deployment = deploymentTask(sbtRunner).value - val serverZip = (server / Universal / packageBin).value.toPath - val imageIdSbt = (sbtRunner / docker).value + /** @param rmi if true then delete all $label images + * rmi = false when `deployLocal`, true when `deploy` */ + private def dockerClean(label: String, rmi: Boolean = true): String = { + val rmiComment = if (rmi) "# " else "" + + s"""|#!/bin/bash -x + | + |# kill and delete all $label containers + |docker kill $$(docker ps -q -f label=$label) + | + |docker rm $$(docker ps -a -q -f label=$label) + | + |${rmiComment}docker rmi $$(docker images -q -f label=$label) + |""".stripMargin + } - deployment.deployLocal(serverZip) - } + private def serverScript( + image: ImageName, + version: String, + sentryDsn: String, + c: DeployConf, + mounts: Seq[String], + ) = + s"""|#!/bin/bash -x + | + |whoami + | + |docker run \\ + | --name scastie-server \\ + | --network=${ c.network } \\ + | --publish ${ c.server.webPort }:${ c.server.webPort } \\ + | --restart=always \\ + | -d \\ + | -v ${ mounts.mkString(" \\\n -v ") } \\ + | $image \\ + | -J-Xmx1G \\ + | -Dsentry.release=$version \\ + | -Dsentry.dsn=$sentryDsn \\ + | -Dakka.cluster.seed-nodes.0=${ c.server.akkaUri } \\ + | -Dakka.cluster.seed-nodes.1=${ c.sbtRunners.firstNodeAkkaUri } + |""".stripMargin + + // jenkins.scala-sbt.org points to 127.0.0.1 to workaround + // https://github.com/sbt/sbt/issues/5458 and https://github.com/sbt/sbt/issues/5456 + private def runnersScript(image: ImageName, version: String, sentryDsn: String, c: DeployConf) = { + import c.sbtRunners.{portsStart, portsEnd} + val arteryHostnameOpts = + if (c.network == "host") + s"-Dakka.remote.artery.canonical.hostname=${ c.sbtRunners.host }" + else """|-Dakka.remote.artery.canonical.hostname=scastie-runner-$idx \ + | -Dakka.remote.artery.bind.hostname=0.0.0.0""".stripMargin + + s"""|#!/bin/bash -x + | + |whoami + | + |# Run all instances + |for i in `seq $portsStart $portsEnd`; + |do + | idx=$$(( i - $portsStart + 1 )) + | echo "Starting scastie-runner-$$idx at port $$i" + | docker run \\ + | --add-host jenkins.scala-sbt.org:127.0.0.1 \\ + | --name scastie-runner-$$idx \\ + | --network=${ c.network } \\ + | --restart=always \\ + | -d \\ + | $image \\ + | -Dakka.remote.artery.canonical.port=$$i \\ + | $arteryHostnameOpts \\ + | -Dcom.olegych.scastie.sbt.production=true \\ + | -Dsentry.release=$version \\ + | -Dsentry.dsn=$sentryDsn \\ + | -Dakka.cluster.seed-nodes.0=${ c.server.akkaUri } \\ + | -Dakka.cluster.seed-nodes.1=${ c.sbtRunners.firstNodeAkkaUri } + |done + |""".stripMargin + } - def deployTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = + import SecretsFile.sentryDsn + private def deployLocalQuickTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = Def.task { - val deployment = deploymentTask(sbtRunner).value - val serverZip = (server / Universal / packageBin).value.toPath - val imageIdSbt = (sbtRunner / dockerBuildAndPush).value - - deployment.deploy(serverZip) - } + val baseDir = (ThisBuild / baseDirectory).value + val secretsFile = SecretsFile.local(baseDir) + val configFile = baseDir / "deployment" / "local.conf" + + val deployConf = DeployConf(configFile) + val deployer = Deployer.Local(baseDir / "local", streams.value.log) + deployer.sync( + configFile -> "application.conf", + secretsFile -> "secrets.conf" + ) - def 
deployQuickTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = - Def.task { - val deployment = deploymentTask(sbtRunner).value - val serverZip = serverZipTask(server).value + s"docker network ls -qf name=${ deployConf.network }".!! match { + case "" => s"docker network create --driver bridge ${ deployConf.network }".!! + case _ => // network created. Nothing to do + } + + deployer.run("clean.sh", dockerClean("scastie")) + + deployer.run("server.sh", + serverScript( + (server / docker / imageNames).value.head, + version.value, + sentryDsn(secretsFile), + deployConf, + deployConf.server.mounts(deployer.rootDir.getAbsolutePath), + ) + ) - deployment.logger.warn( - "deployQuick will not push the sbt-runner docker image nor create the server zip" + deployer.run("sbt.sh", + runnersScript( + (sbtRunner / docker / imageNames).value.head, + version.value, + sentryDsn(secretsFile), + deployConf + ) ) - deployment.deploy(serverZip) } - def deployServerQuickTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = + private def deployServerQuickTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = Def.task { - val deployment = deploymentTask(sbtRunner).value - val serverZip = serverZipTask(server).value + val log = streams.value.log + log.warn("deployServerQuick will not build and push the server docker image") + + val baseDir = (ThisBuild / baseDirectory).value + val secretsFile = SecretsFile(baseDir) + val configFile = baseDir / "deployment" / "production.conf" + + val deployConf = DeployConf(configFile) + val deployer = Deployer.Remote( + deployConf.server.host, + deployConf.server.user, + log, + (server / target).value / "remote-deployer.tmp" + ) + + deployer.sync( + configFile -> "application.conf", + secretsFile -> "secrets.conf" + ) - deployment.logger.warn( - "deployServerQuick will not create the server zip" + deployer.run("clean-server.sh", dockerClean("scastie=server")) + //TODO remove RUNNING_PID + deployer.run("server.sh", + serverScript( + (server / docker / imageNames).value.head, + version.value, + sentryDsn(secretsFile), + deployConf, + deployConf.server.mounts(deployer.home), + ) ) - deployment.deployServer(serverZip) } - private def deploymentTask( - sbtRunner: Project - ): Def.Initialize[Task[Deployment]] = + private def deployRunnersQuickTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = Def.task { - new Deployment( - rootFolder = (ThisBuild / baseDirectory).value, - version = version.value, - sbtDockerImage = (sbtRunner / docker / imageNames).value.head, - logger = streams.value.log + val log = streams.value.log + log.warn("deployServerQuick will not build and push the sbt-runner docker image") + + val baseDir = (ThisBuild / baseDirectory).value + val secretsFile = SecretsFile(baseDir) + val deployConf = DeployConf(baseDir / "deployment" / "production.conf") + + val deployer = Deployer.Remote( + deployConf.server.host, + deployConf.server.user, + log, + (server / target).value / "remote-deployer.tmp" + ) + + import deployConf.sbtRunners.{host, user} + deployer.proxyRun(host, user, "clean-sbt.sh", dockerClean("scastie=runner")) + deployer.proxyRun(host, user, "sbt.sh", + runnersScript( + (sbtRunner / docker / imageNames).value.head, + version.value, + sentryDsn(secretsFile), + deployConf + ) ) } - private def serverZipTask(server: Project): Def.Initialize[Task[Path]] = - Def.task { - val universalTarget = (server / Universal / target).value - val universalName = (server / Universal / name).value - val 
serverVersion = (server / version).value - (universalTarget / (universalName + "-" + serverVersion + ".zip")).toPath + private object SecretsFile { + def sentryDsn(secretsFile: File): String = + ConfigFactory + .parseFile(secretsFile) + .getString("com.olegych.scastie.sentry.dsn") + + def apply(baseDir: File): File = { + val f = baseDir.getParentFile / "scastie-secrets" / "secrets.conf" + if (! f.exists()) { + Process( + s"git clone git@github.com:scalacenter/scastie-secrets.git", + cwd = baseDir.getParentFile + ).! + } else { + // Please pull manually + // Process("git pull origin master", cwd).! + } + f } -} -class Deployment(rootFolder: File, version: String, sbtDockerImage: ImageName, val logger: Logger) { - def deploy(serverZip: Path): Unit = { - deployRunners() - deployServer(serverZip) + def local(baseDir: File): File = { + val f = baseDir / "secrets.conf" + if (!f.exists()) { + IO.write(f, + """|# Please register at sentry.io + |com.olegych.scastie.sentry.dsn="http://127.0.0.1" + |""".stripMargin) + } + f + } } - def deployLocal(serverZip: Path): Unit = { - val sbtDockerNamespace = sbtDockerImage.namespace.get - val sbtDockerRepository = sbtDockerImage.repository - - val destination = rootFolder.toPath.resolve("local") + import DeployConf._ + private case class DeployConf( + network: String, + sbtRunners: RunnerConf, + server: ServerConf, + ) - if (!Files.exists(destination)) { - Files.createDirectory(destination) + private object DeployConf { + case class RunnerConf(user: String, host: String, portsStart: Int, portsSize: Int) { + def portsEnd: Int = portsStart + portsSize - 1 + def firstNodeAkkaUri = s"akka://sys@$host:$portsStart" + } + object RunnerConf { + implicit val loader: ConfigLoader[RunnerConf] = (c: EnrichedConfig) => RunnerConf( + c.getOptional[String]("user").getOrElse("scastie"), + c.get[String]("host"), + c.get[Int]("ports-start"), + c.get[Int]("ports-size"), + ) } - val snippetsFolder = destination.resolve("snippets") + case class ServerConf(user: String, host: String, port: Int, webPort: Int, dataMounts: Seq[String]) { + def akkaUri = s"akka://sys@$host:$port" - if (!Files.exists(snippetsFolder)) { - Files.createDirectory(snippetsFolder) + def mounts(workDir: String): Seq[String] = Seq( + s"$workDir/application.conf:/app/conf/application.conf", + s"$workDir/secrets.conf:/app/conf/secrets.conf", + ) ++ dataMounts.map { + case s if s.charAt(0) == '/' => s + case s => s"$workDir/$s" + } + } + object ServerConf { + implicit val loader: ConfigLoader[ServerConf] = (c: EnrichedConfig) => ServerConf( + c.getOptional[String]("user").getOrElse("scastie"), + c.get[String]("host"), + c.get[Int]("port"), + c.getOptional[Int]("com.olegych.scastie.web.bind.port").getOrElse(9000), + c.get[Seq[String]]("data-mounts"), + ) } - val deploymentFiles = - deployServerFiles(serverZip, destination, local = true) - - deploymentFiles.files.foreach( - file => - Files - .copy(file, destination.resolve(file.getFileName), REPLACE_EXISTING) + implicit val loader: ConfigLoader[DeployConf] = (c: EnrichedConfig) => DeployConf( + c.get[String]("network"), + c.get[RunnerConf]("sbt-runners"), + c.get[ServerConf]("server") ) - val runnerScriptContent = - s"""|#!/usr/bin/env bash - | - |docker run \\ - | --network=host \\ - | -e RUNNER_PORT=5150 \\ - | -e RUNNER_HOSTNAME=127.0.0.1 \\ - | -e RUNNER_RECONNECT=false \\ - | -e RUNNER_PRODUCTION=true \\ - | $sbtDockerNamespace/$sbtDockerRepository:$gitHashNow - | - |""".stripMargin - - val runnerScript = destination.resolve("sbt.sh") - - 
Files.write(runnerScript, runnerScriptContent.getBytes) - setPosixFilePermissions(runnerScript, executablePermissions) - } - - def deployServer(serverZip: Path): Unit = { - val serverScriptDir = Files.createTempDirectory("server") - - val deploymentFiles = - deployServerFiles(serverZip, serverScriptDir, local = false) - - deploymentFiles.files.foreach(rsyncServer) - - val scriptFileName = deploymentFiles.serverScript.getFileName - val uri = userName + "@" + serverHostname - Process(s"ssh $uri ./$scriptFileName") ! logger - } - - case class DeploymentFiles( - secretConfig: Path, - serverZip: Path, - serverScript: Path, - productionConfig: Path, - logbackConfig: Path - ) { - def files: List[Path] = List( - secretConfig, - serverZip, - serverScript, - productionConfig, - logbackConfig - ) - } + def apply(f: File): DeployConf = EnrichedConfig( + ConfigFactory.parseFile(f).resolve() + ).get[DeployConf]("com.olegych.scastie.deploy-config") - private def deployServerFiles(serverZip: Path, destination: Path, local: Boolean): DeploymentFiles = { - logger.info("Generate server script") - - val serverScript = destination.resolve("server.sh") - - val config = - if (local) localConfig - else productionConfig - - val configFileName = config.getFileName - val logbackConfigFileName = logbackConfig.getFileName - val serverZipFileName = serverZip.getFileName.toString.replace(".zip", "") - - val secretConfig = getSecretConfig() - val sentryDsn = getSentryDsn(secretConfig) - - val baseDir = - if (!local) s"/home/$userName/" - else "" - - val content = - s"""|#!/usr/bin/env bash - | - |whoami - | - |serverZipFileName=$serverZipFileName - | - |kill -9 `cat ${baseDir}RUNNING_PID` - | - |rm -rf ${baseDir}server/* - |unzip -o -d ${baseDir}server ${baseDir}$$serverZipFileName - |mv ${baseDir}server/$$serverZipFileName/* ${baseDir}server/ - |rm -rf ${baseDir}server/$$serverZipFileName - | - |nohup ${baseDir}server/bin/server \\ - | -J-Xmx1G \\ - | -Dconfig.file=${baseDir}${configFileName} \\ - | -Dlogback.configurationFile=${baseDir}${logbackConfigFileName} \\ - | -Dsentry.dsn=$sentryDsn \\ - | -Dsentry.release=$version \\ - | &>/dev/null & - |""".stripMargin - - Files.write(serverScript, content.getBytes) - setPosixFilePermissions(serverScript, executablePermissions) - - logger.info("Deploy servers") - - DeploymentFiles( - secretConfig, - serverZip, - serverScript, - config, - logbackConfig - ) + implicit def toConfigLoader[A](f: EnrichedConfig => A): ConfigLoader[A] = playConfigLoader.map(f) } - def deployRunners(): Unit = { - val sbtDockerNamespace = sbtDockerImage.namespace.get - val sbtDockerRepository = sbtDockerImage.repository + private sealed trait Deployer { + def write(path: String, content: String, executable: Boolean = false): Unit + def sync(f: File, newName: String): Unit + def run(scriptName: String, scriptContent: String): Unit - killRunners() + final def sync(sources: (File, String)*): Unit = sources.foreach { + case (f, newName) => sync(f, newName) + } - deployRunners( - "sbt", - s"$sbtDockerNamespace/$sbtDockerRepository", - sbtRunnersPortsStart, - sbtRunnersPortsSize + protected def log: Logger + protected def processLog(name: String): ProcessLogger = ProcessLogger( + out => log.info(s"[$name] $out"), + err => log.warn(s"[$name] $err") ) } - def killRunners(): Unit = { - val killScriptDir = Files.createTempDirectory("kill") - val killScript = killScriptDir.resolve("kill.sh") - - logger.info(s"Generate kill script") - - val killScriptContent = - """|#!/usr/bin/env bash - | - |# Delete all 
containers - |docker rm $(docker ps -a -q) - | - |# Delete all images - |docker rmi $(docker images -q) - | - |docker kill $(docker ps -q) - |""".stripMargin - - Files.write(killScript, killScriptContent.getBytes) - setPosixFilePermissions(killScript, executablePermissions) - val scriptFileName = killScript.getFileName - - val runnerUri = userName + "@" + runnersHostname - val serverUri = userName + "@" + serverHostname - - val proxyScript = killScriptDir.resolve("kill-proxy.sh") - val proxyScriptFileName = proxyScript.getFileName - - val proxyScriptContent = - s"""|rm kill-proxy.sh - |rsync $scriptFileName $runnerUri:$scriptFileName - |ssh $runnerUri ./$scriptFileName - |rm $scriptFileName""".stripMargin - - Files.write(proxyScript, proxyScriptContent.getBytes) - setPosixFilePermissions(proxyScript, executablePermissions) - - rsyncServer(killScript) - rsyncServer(proxyScript) - Process(s"ssh $serverUri ./$proxyScriptFileName") ! logger - } - - def deployRunners(runner: String, image: String, runnersPortsStart: Int, runnersPortsSize: Int): Unit = { - - val runnerScriptDir = Files.createTempDirectory(runner) - val runnerScript = runnerScriptDir.resolve(runner + ".sh") - - logger.info(s"Generate $runner script") - - val runnersPortsEnd = runnersPortsStart + (runnersPortsSize - 1) - - val dockerImagePath = s"$image:$gitHashNow" - - val sentryDsn = getSentryDsn(getSecretConfig()) - - //jenkins.scala-sbt.org points to 127.0.0.1 to workaround https://github.com/sbt/sbt/issues/5458 and https://github.com/sbt/sbt/issues/5456 - val runnerScriptContent = - s"""|#!/usr/bin/env bash - | - |whoami - | - | - |docker rmi -f $dockerImagePath - | - |# Run all instances - |for i in `seq $runnersPortsStart $runnersPortsEnd`; - |do - | echo "Starting Runner: Port $$i" - | docker run \\ - | --add-host jenkins.scala-sbt.org:127.0.0.1 \\ - | --network=host \\ - | --restart=always \\ - | -d \\ - | -e RUNNER_PRODUCTION=true \\ - | -e RUNNER_PORT=$$i \\ - | -e SERVER_HOSTNAME=$serverHostname \\ - | -e SERVER_AKKA_PORT=$serverAkkaPort \\ - | -e RUNNER_HOSTNAME=$runnersHostname \\ - | -e SENTRY_DSN=$sentryDsn \\ - | -e SENTRY_RELEASE=$version \\ - | $dockerImagePath - |done - |""".stripMargin - - Files.write(runnerScript, runnerScriptContent.getBytes) - setPosixFilePermissions(runnerScript, executablePermissions) - val scriptFileName = runnerScript.getFileName - - val runnerUri = userName + "@" + runnersHostname - val serverUri = userName + "@" + serverHostname - - val proxyScript = runnerScriptDir.resolve(runner + "-proxy.sh") - val proxyScriptFileName = proxyScript.getFileName - - val proxyScriptContent = - s"""|rm ${runner}-proxy.sh - |rsync $scriptFileName $runnerUri:$scriptFileName - |ssh $runnerUri ./$scriptFileName - |rm $scriptFileName""".stripMargin - - Files.write(proxyScript, proxyScriptContent.getBytes) - setPosixFilePermissions(proxyScript, executablePermissions) - - rsyncServer(runnerScript) - rsyncServer(proxyScript) - Process(s"ssh $serverUri ./$proxyScriptFileName") ! logger - } - - private def getSecretConfig(): Path = { - val scastieSecrets = "scastie-secrets" - val secretFolder = rootFolder / ".." 
/ scastieSecrets - - if (Files.exists(secretFolder.toPath)) { - Process("git pull origin master", secretFolder) - } else { - Process(s"git clone git@github.com:scalacenter/$scastieSecrets.git") + private object Deployer { + case class Local(rootDir: File, log: Logger) extends Deployer { + override def write(path: String, content: String, executable: Boolean = false): Unit = { + val f = rootDir / path + IO.write(f, content) + if (executable) setExecutable(f) + } + + override def sync(f: File, newName: String): Unit = { + val newFile = rootDir / newName + IO.createDirectory(newFile.getParentFile) + IO.copyFile(f, newFile) + } + + override def run(scriptName: String, scriptContent: String): Unit = { + log.info(s"Generate $scriptName script") + write(scriptName, scriptContent, executable = true) + (rootDir / scriptName).absolutePath ! processLog(scriptName) + } } - (secretFolder / "secrets.conf").toPath - } - - private def getSentryDsn(secretConfig: Path): String = { - val config = ConfigFactory.parseFile(secretConfig.toFile) - val scastieConfig = config.getConfig("com.olegych.scastie") - scastieConfig.getString("sentry.dsn") - } - - private val userName = "scastie" - - private val deploymentFolder = rootFolder / "deployment" - - private val productionConfig = (deploymentFolder / "production.conf").toPath - private val localConfig = (deploymentFolder / "local.conf").toPath - - private val logbackConfig = (deploymentFolder / "logback.xml").toPath - - private val config = - ConfigFactory.parseFile(productionConfig.toFile) - - val balancerConfig = config.getConfig("com.olegych.scastie.balancer") - - private val serverConfig = config.getConfig("com.olegych.scastie.web") - private val serverHostname = serverConfig.getString("hostname") - private val serverAkkaPort = serverConfig.getInt("akka-port") - - private val runnersHostname = balancerConfig.getString("remote-hostname") - - private val sbtRunnersPortsStart = - balancerConfig.getInt("remote-sbt-ports-start") - private val sbtRunnersPortsSize = - balancerConfig.getInt("remote-sbt-ports-size") - - private val executablePermissions = - PosixFilePermissions.fromString("rwxr-xr-x") - - private def rsync(file: Path, userName: String, hostname: String, logger: Logger): Unit = { - val uri = userName + "@" + hostname - val fileName = file.getFileName - Process(s"rsync $file $uri:$fileName") ! logger - } + case class Remote(host: String, user: String, log: Logger, tempFile: File) extends Deployer { + val home = s"/home/$user" + val uri: String = s"$user@$host" + + override def write(path: String, content: String, executable: Boolean = false): Unit = { + IO.write(tempFile, content) + if (executable) setExecutable(tempFile) + sync(tempFile, path) + IO.delete(tempFile) + } + + override def sync(f: File, newName: String): Unit = + s"rsync $f $uri:$newName" ! processLog("rsync") + + override def run(scriptName: String, scriptContent: String): Unit = { + log.info(s"Generate $scriptName script") + write(scriptName, scriptContent, executable = true) + s"ssh $uri ./$scriptName" ! 
processLog(scriptName) + } + + /** run `scriptContent` on `remoteHost` by: + * + ssh to this `host` + * + then, from this `host`, ssh to `remoteHost` and run */ + def proxyRun(remoteHost: String, remoteUser: String, scriptName: String, scriptContent: String): Unit = { + log.info(s"Generate $scriptName script") + write(scriptName, scriptContent, executable = true) + + val remoteUri = s"$remoteUser@$remoteHost" + run(s"proxy-$scriptName", + s"""|rsync $scriptName $remoteUri:$scriptName + |ssh $remoteUri ./$scriptName + |rm $scriptName + |""".stripMargin) + } + } - private def rsyncServer(file: Path) = - rsync(file, userName, serverHostname, logger) + private val executablePermissions = PosixFilePermissions.fromString("rwxr-xr-x") - private def setPosixFilePermissions( - path: Path, - perms: java.util.Set[PosixFilePermission] - ): Path = { - try Files.setPosixFilePermissions(path, perms) - catch { - case e: Exception => path - } + private def setExecutable(f: File): File = + try Files.setPosixFilePermissions(f.toPath, executablePermissions).toFile + catch { case _: Exception => f } } } diff --git a/project/DockerHelper.scala b/project/DockerHelper.scala index 8cd8df468..1d77203cf 100644 --- a/project/DockerHelper.scala +++ b/project/DockerHelper.scala @@ -4,7 +4,7 @@ import SbtShared._ import com.typesafe.sbt.SbtNativePackager import com.typesafe.sbt.SbtNativePackager.Universal import com.typesafe.sbt.packager.universal.UniversalPlugin -import com.typesafe.sbt.packager.Keys.{bashScriptExtraDefines, executableScriptName, stage} +import com.typesafe.sbt.packager.Keys.{bashScriptDefines, executableScriptName, scriptClasspath, stage} import com.typesafe.sbt.packager.archetypes.scripts.BashStartScriptPlugin import sbtdocker.DockerPlugin import sbtdocker.DockerPlugin.autoImport.{ImageName, docker, imageNames} @@ -34,8 +34,8 @@ object DockerHelper extends AutoPlugin { tag = Some(gitHashNow) ) ), - // https://www.scala-sbt.org/sbt-native-packager/archetypes/cheatsheet.html#extra-defines - bashScriptExtraDefines += s"""addJava "-Dlogback.configurationFile=$appDir/conf/logback.xml"""", + // Add to classPath so /app/conf/{logback.xml, application.conf} will auto be picked + bashScriptDefines / scriptClasspath += s"$appDir/conf/", Universal / mappings += ( (ThisBuild / baseDirectory).value / "deployment" / "logback.xml" -> "conf/logback.xml" ), @@ -59,6 +59,7 @@ object DockerHelper extends AutoPlugin { def serverDockerfile(): Def.Initialize[Task[Dockerfile]] = Def.task { baseDockerfile("adoptopenjdk:8u292-b10-jre-hotspot", stage.value) + .label("scastie" -> "server") .user(username) .workDir(appDir) .env("DATA_DIR" -> s"$appDir/data") @@ -104,6 +105,7 @@ object DockerHelper extends AutoPlugin { val dest = s"$userHome/projects" baseDockerfile("adoptopenjdk:8u292-b10-jdk-hotspot", stage.value) + .label("scastie" -> "runner") .runRaw(s"""\\ mkdir $appDir/sbt && \\ curl -Lo /tmp/sbt-${distSbtVersion}.tgz \\ diff --git a/sbt-runner/src/main/resources/reference.conf b/sbt-runner/src/main/resources/reference.conf index 92819b646..9dc8ce0a7 100644 --- a/sbt-runner/src/main/resources/reference.conf +++ b/sbt-runner/src/main/resources/reference.conf @@ -2,7 +2,6 @@ com.olegych.scastie.sbt { sbtReloadTimeout = 100s runTimeout = 30s production = false - production = ${?RUNNER_PRODUCTION} } akka { From 6f9d1a3d2fe41f23cb248b2cbae7bff5dbda6a9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 13:42:10 +0700 Subject: [PATCH 31/42] mongoUri configurable and 
`snippets-container` config break change --- balancer/src/main/resources/reference.conf | 10 ++++---- .../DispatchActor.scala | 23 ++++++++++--------- deployment/production.conf | 5 +++- .../MongoDBSnippetsContainer.scala | 8 +++---- .../SnippetsContainerTest.scala | 5 +++- 5 files changed, 30 insertions(+), 21 deletions(-) diff --git a/balancer/src/main/resources/reference.conf b/balancer/src/main/resources/reference.conf index c296b9375..8a71d256e 100644 --- a/balancer/src/main/resources/reference.conf +++ b/balancer/src/main/resources/reference.conf @@ -1,14 +1,16 @@ com.olegych.scastie { # default parent dir of: - # + balancer.{snippets-dir, old-snippets-dir} - if use files snippets-container + # + snippets-container.{snippets-dir, old-snippets-dir, type = files} # + web.oauth2.{users-file, sessions-file} data-dir = ./target data-dir = ${?DATA_DIR} } com.olegych.scastie.balancer { - snippets-container = files - snippets-dir = ${com.olegych.scastie.data-dir}/snippets/ - old-snippets-dir = ${com.olegych.scastie.data-dir}/old-snippets/ + snippets-container { + type = files + snippets-dir = ${com.olegych.scastie.data-dir}/snippets/ + old-snippets-dir = ${com.olegych.scastie.data-dir}/old-snippets/ + } } akka { diff --git a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala index 8ced5a125..921b9dafc 100644 --- a/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala +++ b/balancer/src/main/scala/com.olegych.scastie.balancer/DispatchActor.scala @@ -104,7 +104,8 @@ class DispatchActor( private val container = config.snippetsContainer match { case SnippetsType.Memory => new InMemorySnippetsContainer - case SnippetsType.Mongo => new MongoDBSnippetsContainer(ExecutionContext.fromExecutor(Executors.newWorkStealingPool())) + case SnippetsType.Mongo(uri) => + new MongoDBSnippetsContainer(uri, ExecutionContext.fromExecutor(Executors.newWorkStealingPool())) case f: SnippetsType.Files => new FilesSnippetsContainer( f.snippetsDir, @@ -289,16 +290,10 @@ case class BalancerConf( object BalancerConf { import SnippetsType._ implicit val loader: ConfigLoader[BalancerConf] = (c: EnrichedConfig) => BalancerConf( - c.get[String]("snippets-container") match { + c.get[String]("snippets-container.type") match { + case "mongo" => Mongo(c.get[String]("snippets-container.uri")) + case "files" => c.get[Files]("snippets-container") case "memory" => Memory - case "mongo" => Mongo - case "files" => Files( - c.get[Path]("snippets-dir"), - c.get[Path]("old-snippets-dir"), - ) - case _ => - println("fallback to in-memory container") - Memory } ) } @@ -307,10 +302,16 @@ sealed trait SnippetsType object SnippetsType { case object Memory extends SnippetsType - case object Mongo extends SnippetsType + case class Mongo(uri: String) extends SnippetsType case class Files( snippetsDir: Path, oldSnippetsDir: Path, ) extends SnippetsType + object Files { + implicit val loader: ConfigLoader[Files] = (c: EnrichedConfig) => Files( + c.get[Path]("snippets-dir"), + c.get[Path]("old-snippets-dir"), + ) + } } diff --git a/deployment/production.conf b/deployment/production.conf index 1e039138e..f9baf0760 100644 --- a/deployment/production.conf +++ b/deployment/production.conf @@ -36,7 +36,10 @@ com.olegych.scastie.deploy-config { com.olegych.scastie { balancer { - snippets-container = mongo + snippets-container { + type = mongo + uri = "mongodb://localhost:27017/snippets" + } // server 1536M # 12GB RAM diff --git 
a/storage/src/main/scala/com.olegych.scastie.storage/MongoDBSnippetsContainer.scala b/storage/src/main/scala/com.olegych.scastie.storage/MongoDBSnippetsContainer.scala index 4d9a8b555..4be2ee5d8 100644 --- a/storage/src/main/scala/com.olegych.scastie.storage/MongoDBSnippetsContainer.scala +++ b/storage/src/main/scala/com.olegych.scastie.storage/MongoDBSnippetsContainer.scala @@ -46,10 +46,10 @@ object MongoSnippet { implicit val formatMongoSnippet: OFormat[MongoSnippet] = Json.format[MongoSnippet] } -class MongoDBSnippetsContainer(_ec: ExecutionContext) extends SnippetsContainer { - protected implicit val ec: ExecutionContext = _ec - - private val mongoUri = "mongodb://localhost:27017/snippets" +class MongoDBSnippetsContainer( + private val mongoUri: String, + protected implicit val ec: ExecutionContext, +) extends SnippetsContainer { private val driver = AsyncDriver() private val connection = for { parsedUri <- MongoConnection.fromString(mongoUri) diff --git a/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala b/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala index d8904ee78..4e6e31070 100644 --- a/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala +++ b/storage/src/test/scala/com.olegych.scastie.storage/SnippetsContainerTest.scala @@ -20,7 +20,10 @@ class SnippetsContainerTest extends AnyFunSuite with BeforeAndAfterAll { val root = Files.createTempDirectory("test") val oldRoot = Files.createTempDirectory("old-test") - private lazy val mongoContainer = new MongoDBSnippetsContainer(scala.concurrent.ExecutionContext.Implicits.global) + private lazy val mongoContainer = new MongoDBSnippetsContainer( + mongoUri = "mongodb://localhost:27017/snippets", + ec = scala.concurrent.ExecutionContext.Implicits.global + ) private val testContainer: SnippetsContainer = { if (mongo) mongoContainer From 00e3f13fa0e845fe000ea2c8677ae2982a2cb51b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 15:32:32 +0700 Subject: [PATCH 32/42] ServerMain: Don't support passing port as args.head --- .../scala/com.olegych.scastie.web/ServerMain.scala | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala index 37ee431bb..e418cb4b2 100644 --- a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala +++ b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala @@ -29,10 +29,8 @@ object ServerMain { val webConf = config.get[WebConf]("web") val balancerConf = config.get[BalancerConf]("balancer") - val port = args.headOption.map(_.toInt).getOrElse(webConf.bind.port) - val system = ActorSystem[Nothing]( - Guardian(webConf, balancerConf, port), + Guardian(webConf, balancerConf), config.get[String]("system-name") ) @@ -42,17 +40,15 @@ object ServerMain { | canonical | bind |} - |com.olegych.scastie.web.bind { - | hostname - | port = $port - |}""".stripMargin)) + |com.olegych.scastie.web.bind + |""".stripMargin)) Await.result(system.whenTerminated, Duration.Inf) } } private object Guardian { - def apply(webCfg: WebConf, balancerCfg: BalancerConf, port: Int): Behavior[Nothing] = + def apply(webCfg: WebConf, balancerCfg: BalancerConf): Behavior[Nothing] = Behaviors.setup[Nothing] { context => import context.spawn implicit def system: ActorSystem[Nothing] = context.system @@ -99,7 +95,7 @@ private object Guardian { 
Await.result( Http() - .newServerAt(webCfg.bind.hostname, port) + .newServerAt(webCfg.bind.hostname, webCfg.bind.port) .bindFlow(routes), 1.seconds) Behaviors.empty From f40e70af42331032dde4ee4617467a49257bfc52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 15:33:49 +0700 Subject: [PATCH 33/42] ShowConfig: Remove overridden support --- .../main/scala/com.olegych.scastie/util/ShowConfig.scala | 6 ------ .../scala/com.olegych.scastie.util/ShowConfigTest.scala | 4 ---- 2 files changed, 10 deletions(-) diff --git a/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala b/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala index da3b9914b..fd9721e6b 100644 --- a/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/ShowConfig.scala @@ -25,12 +25,6 @@ object ShowConfig { case s if s.trim.startsWith("}") => groups.pop() s - // override - case s if s.contains(':') || s.contains('=') => - val Array(path, newValue) = s.split(Array(':', '=')) - val value = valueAt(path) - if (newValue.trim == value) s - else s"$s # Overridden. Old value = $value" // normal path case s => val leadingSpaces = "\n" + s.takeWhile(_ == ' ') diff --git a/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala b/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala index 632a4f3b6..da2586dc3 100644 --- a/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala +++ b/utils/src/test/scala/com.olegych.scastie.util/ShowConfigTest.scala @@ -52,8 +52,4 @@ class ShowConfigTest extends AnyFunSuite with Matchers { | d2.x: 1 |}""".stripMargin } - - test("with overridden") { - ShowConfig(config, "d.d2.x = 2") mustBe "d.d2.x = 2 # Overridden. 
Old value = 1" - } } From 54d448d7fe99da619498924dba867a45dd2f7cd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 15:44:21 +0700 Subject: [PATCH 34/42] Remove writing RUNNING_PID Don't need this logic as we are now running scastie in docker --- project/Deployment.scala | 1 - .../scala/com.olegych.scastie.sbt/SbtActor.scala | 2 +- .../main/scala/com.olegych.scastie.sbt/SbtMain.scala | 6 ------ .../scala/com.olegych.scastie.web/ServerMain.scala | 6 +----- .../com.olegych.scastie/util/ScastieFileUtil.scala | 12 ------------ 5 files changed, 2 insertions(+), 25 deletions(-) diff --git a/project/Deployment.scala b/project/Deployment.scala index afcbe19b4..00c6c0026 100644 --- a/project/Deployment.scala +++ b/project/Deployment.scala @@ -221,7 +221,6 @@ object Deployment { ) deployer.run("clean-server.sh", dockerClean("scastie=server")) - //TODO remove RUNNING_PID deployer.run("server.sh", serverScript( (server / docker / imageNames).value.head, diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala index e8b67496e..5d95c6a8e 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala @@ -64,7 +64,7 @@ class SbtActor private ( } case class SbtConf( - production: Boolean, + production: Boolean, // TODO remove runTimeout: FiniteDuration, sbtReloadTimeout: FiniteDuration, ) diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala index 4693a5856..8e1efaabe 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtMain.scala @@ -1,7 +1,6 @@ package com.olegych.scastie.sbt import akka.actor.typed.scaladsl.Behaviors -import com.olegych.scastie.util.ScastieFileUtil.writeRunningPid import com.olegych.scastie.util.ShowConfig import com.typesafe.sslconfig.util.EnrichedConfig import akka.actor.typed.{ActorSystem, Behavior} @@ -20,11 +19,6 @@ object SbtMain { ) val sbtConf = config.get[SbtConf]("sbt") - if (sbtConf.production) { - val pid = writeRunningPid() - logger.info(s"Starting sbtRunner pid: $pid") - } - val system = ActorSystem[Nothing]( Guardian(sbtConf), config.get[String]("system-name") diff --git a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala index e418cb4b2..568b0c17e 100644 --- a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala +++ b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala @@ -3,7 +3,7 @@ package com.olegych.scastie.web import com.olegych.scastie.web.routes._ import com.olegych.scastie.web.oauth2._ import com.olegych.scastie.balancer._ -import com.olegych.scastie.util.{ShowConfig, ScastieFileUtil} +import com.olegych.scastie.util.ShowConfig import com.typesafe.sslconfig.util.{ConfigLoader, EnrichedConfig} import com.olegych.scastie.util.ConfigLoaders._ import akka.http.scaladsl._ @@ -55,10 +55,6 @@ private object Guardian { implicit def ec: ExecutionContext = context.system.executionContext implicit def sc: Scheduler = context.system.scheduler - if (webCfg.production) { - ScastieFileUtil.writeRunningPid() - } - val github = new Github(webCfg.oauth2) val session = new GithubUserSession( webCfg, diff --git a/utils/src/main/scala/com.olegych.scastie/util/ScastieFileUtil.scala 
b/utils/src/main/scala/com.olegych.scastie/util/ScastieFileUtil.scala index 28e35f8d7..35cf4e7fb 100644 --- a/utils/src/main/scala/com.olegych.scastie/util/ScastieFileUtil.scala +++ b/utils/src/main/scala/com.olegych.scastie/util/ScastieFileUtil.scala @@ -1,8 +1,6 @@ package com.olegych.scastie.util import java.nio.file._ -import java.lang.management.ManagementFactory -import java.nio.charset.StandardCharsets object ScastieFileUtil { def slurp(src: Path): Option[String] = { @@ -22,14 +20,4 @@ object ScastieFileUtil { () } } - - def writeRunningPid(): String = { - val pid = ManagementFactory.getRuntimeMXBean.getName.split("@").head - val pidFile = Paths.get("RUNNING_PID") - Files.write(pidFile, pid.getBytes(StandardCharsets.UTF_8)) - sys.addShutdownHook { - Files.delete(pidFile) - } - pid - } } From cf4b0c3242f188dab2c58b46c13d0d7455213060 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 15:56:58 +0700 Subject: [PATCH 35/42] Simplify SbtProcess's params --- .../com.olegych.scastie.sbt/SbtActor.scala | 7 +----- .../com.olegych.scastie.sbt/SbtProcess.scala | 24 ++++++------------- .../SbtActorTest.scala | 10 ++++---- 3 files changed, 14 insertions(+), 27 deletions(-) diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala index 5d95c6a8e..b792d2f15 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala @@ -42,12 +42,7 @@ class SbtActor private ( private val sbtRunner = context.spawn( - SbtProcess( - config.runTimeout, - config.sbtReloadTimeout, - config.production, - javaOptions = Seq("-Xms512m", "-Xmx1g") - ), + SbtProcess(config, Seq("-Xms512m", "-Xmx1g")), name = "SbtRunner" ) diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala index 15d5d31bf..22e27484d 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala @@ -57,17 +57,12 @@ object SbtProcess { /** Let it die and restart the actor */ final class LetItDie(msg: String) extends Exception(msg) - def apply(runTimeout: FiniteDuration, - reloadTimeout: FiniteDuration, - isProduction: Boolean, - javaOptions: Seq[String], - customSbtDir: Option[Path] = None - ): Behavior[Event] = + def apply(conf: SbtConf, javaOptions: Seq[String]): Behavior[Event] = Behaviors.withStash(100) { buffer => Behaviors.supervise[Event] { Behaviors.setup { ctx => Behaviors.withTimers { timers => - new SbtProcess(runTimeout, reloadTimeout, isProduction, javaOptions, customSbtDir)(ctx, buffer, timers)() + new SbtProcess(conf, javaOptions)(ctx, buffer, timers)() } } }.onFailure(SupervisorStrategy.restart) @@ -76,11 +71,7 @@ object SbtProcess { import SbtProcess._ class SbtProcess private ( - runTimeout: FiniteDuration, - reloadTimeout: FiniteDuration, - isProduction: Boolean, - javaOptions: Seq[String], - customSbtDir: Option[Path] + conf: SbtConf, javaOptions: Seq[String] )(context: ActorContext[Event], buffer: StashBuffer[Event], timers: TimerScheduler[Event]) { import ProcessActor._ import context.{executionContext, log} @@ -104,8 +95,7 @@ class SbtProcess private ( } } - private val sbtDir: Path = - customSbtDir.getOrElse(Files.createTempDirectory("scastie")) + private val sbtDir: Path = Files.createTempDirectory("scastie") private val 
buildFile = sbtDir.resolve("build.sbt") private val promptUniqueId = Random.alphanumeric.take(10).mkString @@ -222,7 +212,7 @@ class SbtProcess private ( if (isReloading) { process ! Input("reload;compile/compileInputs") - gotoWithTimeout(sbtRun, reloading, SbtStateTimeout(reloadTimeout, "updating build configuration")) + gotoWithTimeout(sbtRun, reloading, SbtStateTimeout(conf.sbtReloadTimeout, "updating build configuration")) } else { gotoRunning(sbtRun) } @@ -238,7 +228,7 @@ class SbtProcess private ( val extractor = new OutputExtractor( scalaJsContent _, scalaJsSourceMapContent _, - isProduction, + conf.production, promptUniqueId ) @@ -300,7 +290,7 @@ class SbtProcess private ( private def gotoRunning(sbtRun: SbtRun): Behavior[Event] = { process ! Input(sbtRun.inputs.target.sbtRunCommand(sbtRun.inputs.isWorksheetMode)) - gotoWithTimeout(sbtRun, running, SbtStateTimeout(runTimeout, "running code")) + gotoWithTimeout(sbtRun, running, SbtStateTimeout(conf.runTimeout, "running code")) } private def isPrompt(line: String): Boolean = { diff --git a/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala b/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala index b382c946e..cc7254e0c 100644 --- a/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala +++ b/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala @@ -1,6 +1,6 @@ package com.olegych.scastie.sbt -import akka.actor.{ActorRef, ActorSystem, Props} +import akka.actor.{ActorRef, ActorSystem} import akka.testkit.TestActor.AutoPilot import akka.testkit.{ImplicitSender, TestKit, TestProbe} import com.olegych.scastie.api._ @@ -239,9 +239,11 @@ class SbtActorTest() extends TestKit(ActorSystem("SbtActorTest")) with ImplicitS // https://stackoverflow.com/questions/18335127/testing-akka-actors-that-mixin-stash-with-testactorref private val sbtActor = system.spawn( SbtProcess( - runTimeout = timeout, - reloadTimeout = 20.seconds, - isProduction = false, + SbtConf( + production = false, + runTimeout = timeout, + sbtReloadTimeout = 20.seconds + ), javaOptions = Seq("-Xms51m", "-Xmx550m") ), name = "SbtRunner-test" From f93d7dbe15f00cf7f5133a1f1f04cec2b90f0a23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 16:20:18 +0700 Subject: [PATCH 36/42] Change config web|sbt.production => embedded-url-base|remapSourceMapUrlBase --- deployment/local.conf | 3 ++- deployment/production.conf | 3 ++- project/Deployment.scala | 13 +++++++++---- sbt-runner/src/main/resources/reference.conf | 2 +- .../com.olegych.scastie.sbt/OutputExtractor.scala | 8 ++------ .../scala/com.olegych.scastie.sbt/SbtActor.scala | 4 ++-- .../scala/com.olegych.scastie.sbt/SbtProcess.scala | 2 +- .../com.olegych.scastie.sbt/SbtActorTest.scala | 2 +- server/src/main/resources/reference.conf | 2 +- .../scala/com.olegych.scastie.web/ServerMain.scala | 6 +++--- .../routes/FrontPageRoutes.scala | 6 +----- 11 files changed, 25 insertions(+), 26 deletions(-) diff --git a/deployment/local.conf b/deployment/local.conf index 7a44c76a0..9adc39546 100644 --- a/deployment/local.conf +++ b/deployment/local.conf @@ -15,6 +15,7 @@ com.olegych.scastie.deploy-config { host = "scastie-runner-1" ports-start = 5150 ports-size = 1 + remapSourceMapUrlBase = "http://localhost:9000" } server { host: ${akka.remote.artery.canonical.hostname} @@ -25,7 +26,7 @@ com.olegych.scastie.deploy-config { com.olegych.scastie { web { - production = true + embedded-url-base = "http://localhost:9000" bind.port = 
9000 } } diff --git a/deployment/production.conf b/deployment/production.conf index f9baf0760..b7bf075a5 100644 --- a/deployment/production.conf +++ b/deployment/production.conf @@ -16,6 +16,7 @@ com.olegych.scastie.deploy-config { host = "scastie-sbt.scala-lang.org" ports-start = 5150 ports-size = 6 + remapSourceMapUrlBase = "https://scastie.scala-lang.org" } server { // use to ssh to server from local machine @@ -48,7 +49,7 @@ com.olegych.scastie { // 3 * (1200 + 512 + 512) + 3 * (1024 + 512) = 11 280 } web { - production = true + embedded-url-base = "https://scastie.scala-lang.org" } } diff --git a/project/Deployment.scala b/project/Deployment.scala index 00c6c0026..92723a29e 100644 --- a/project/Deployment.scala +++ b/project/Deployment.scala @@ -46,7 +46,7 @@ object Deployment { | environment: | JAVA_OPTS: >- | -Dsentry.release=${ version.value } - | -Dcom.olegych.scastie.sbt.production=true + | -Dcom.olegych.scastie.sbt.remapSourceMapUrlBase="http://localhost:9000" | -Dakka.cluster.seed-nodes.0=akka://sys@server:15000 | -Dakka.cluster.seed-nodes.1=akka://sys@sbt-runner-1:5150 | restart: unless-stopped @@ -59,7 +59,7 @@ object Deployment { | JAVA_OPTS: >- | -Xmx1G | -Dsentry.release=${ version.value } - | -Dcom.olegych.scastie.web.production=true + | -Dcom.olegych.scastie.web.embedded-url-base="http://localhost:9000" | -Dakka.remote.artery.canonical.hostname=server | -Dakka.remote.artery.canonical.port=15000 | -Dakka.cluster.seed-nodes.0=akka://sys@server:15000 @@ -148,7 +148,7 @@ object Deployment { | $image \\ | -Dakka.remote.artery.canonical.port=$$i \\ | $arteryHostnameOpts \\ - | -Dcom.olegych.scastie.sbt.production=true \\ + | -Dcom.olegych.scastie.sbt.remapSourceMapUrlBase="${ c.sbtRunners.remapSourceMapUrlBase }" \\ | -Dsentry.release=$version \\ | -Dsentry.dsn=$sentryDsn \\ | -Dakka.cluster.seed-nodes.0=${ c.server.akkaUri } \\ @@ -300,7 +300,11 @@ object Deployment { ) private object DeployConf { - case class RunnerConf(user: String, host: String, portsStart: Int, portsSize: Int) { + case class RunnerConf( + user: String, host: String, + portsStart: Int, portsSize: Int, + remapSourceMapUrlBase: String, + ) { def portsEnd: Int = portsStart + portsSize - 1 def firstNodeAkkaUri = s"akka://sys@$host:$portsStart" } @@ -310,6 +314,7 @@ object Deployment { c.get[String]("host"), c.get[Int]("ports-start"), c.get[Int]("ports-size"), + c.get[String]("remapSourceMapUrlBase"), ) } diff --git a/sbt-runner/src/main/resources/reference.conf b/sbt-runner/src/main/resources/reference.conf index 9dc8ce0a7..348f6a858 100644 --- a/sbt-runner/src/main/resources/reference.conf +++ b/sbt-runner/src/main/resources/reference.conf @@ -1,7 +1,7 @@ com.olegych.scastie.sbt { sbtReloadTimeout = 100s runTimeout = 30s - production = false + remapSourceMapUrlBase = "http://localhost:9000" } akka { diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/OutputExtractor.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/OutputExtractor.scala index 72d7ac17f..ef912206f 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/OutputExtractor.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/OutputExtractor.scala @@ -12,7 +12,7 @@ import scala.util.control.NonFatal class OutputExtractor(getScalaJsContent: () => Option[String], getScalaJsSourceMapContent: () => Option[String], - isProduction: Boolean, + remapSourceMapUrlBase: String, promptUniqueId: String) { private val log = LoggerFactory.getLogger(getClass) @@ -111,11 +111,7 @@ class OutputExtractor(getScalaJsContent: () => 
Option[String], sources = sourceMap.sources.map( source => if (source.startsWith(ScalaTarget.Js.sourceUUID)) { - val host = - if (isProduction) "https://scastie.scala-lang.org" - else "http://localhost:9000" - - host + snippetId.scalaJsUrl(ScalaTarget.Js.sourceFilename) + remapSourceMapUrlBase + snippetId.scalaJsUrl(ScalaTarget.Js.sourceFilename) } else source ) ) diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala index b792d2f15..6d5aafad7 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtActor.scala @@ -59,14 +59,14 @@ class SbtActor private ( } case class SbtConf( - production: Boolean, // TODO remove + remapSourceMapUrlBase: String, runTimeout: FiniteDuration, sbtReloadTimeout: FiniteDuration, ) object SbtConf { implicit val loader: ConfigLoader[SbtConf] = (c: EnrichedConfig) => SbtConf( - c.get[Boolean]("production"), + c.get[String]("remapSourceMapUrlBase"), c.get[FiniteDuration]("runTimeout"), c.get[FiniteDuration]("sbtReloadTimeout") ) diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala index 22e27484d..4d4692242 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala @@ -228,7 +228,7 @@ class SbtProcess private ( val extractor = new OutputExtractor( scalaJsContent _, scalaJsSourceMapContent _, - conf.production, + conf.remapSourceMapUrlBase, promptUniqueId ) diff --git a/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala b/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala index cc7254e0c..28b894295 100644 --- a/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala +++ b/sbt-runner/src/test/scala/com.olegych.scastie.sbt/SbtActorTest.scala @@ -240,7 +240,7 @@ class SbtActorTest() extends TestKit(ActorSystem("SbtActorTest")) with ImplicitS private val sbtActor = system.spawn( SbtProcess( SbtConf( - production = false, + remapSourceMapUrlBase = "http://localhost:9000", runTimeout = timeout, sbtReloadTimeout = 20.seconds ), diff --git a/server/src/main/resources/reference.conf b/server/src/main/resources/reference.conf index b3101aefd..6fc87b360 100644 --- a/server/src/main/resources/reference.conf +++ b/server/src/main/resources/reference.conf @@ -1,5 +1,5 @@ com.olegych.scastie.web { - production = false + embedded-url-base = "http://localhost:9000" session-secret = "WWItju7orWthk7vbAPqI72XOBCfZFxbVjMH169o9eLjHmMCGXw2VdBsQeTNF3WH0" oauth2 { diff --git a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala index 568b0c17e..efa938ffa 100644 --- a/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala +++ b/server/src/main/scala/com.olegych.scastie.web/ServerMain.scala @@ -84,7 +84,7 @@ private object Guardian { cors()( concat( new ScalaLangRoutes(dispatchActor, userDirectives).routes, - new FrontPageRoutes(webCfg.production).routes + new FrontPageRoutes(webCfg.embeddedUrlBase).routes ) ) ) @@ -99,14 +99,14 @@ private object Guardian { } case class WebConf( - production: Boolean, + embeddedUrlBase: String, oauth2: Oauth2Conf, sessionSecret: String, bind: BindConf, ) object WebConf { implicit val loader: ConfigLoader[WebConf] = (c: EnrichedConfig) => WebConf( - 
c.get[Boolean]("production"), + c.get[String]("embedded-url-base"), c.get[Oauth2Conf]("oauth2"), c.get[String]("session-secret"), c.get[BindConf]("bind") diff --git a/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala b/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala index c373ff267..3c07cdd92 100644 --- a/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala +++ b/server/src/main/scala/com.olegych.scastie.web/routes/FrontPageRoutes.scala @@ -6,7 +6,7 @@ import akka.http.scaladsl.server.Route import com.olegych.scastie.api.{SnippetId, SnippetUserPart} import com.olegych.scastie.util.Base64UUID -class FrontPageRoutes(production: Boolean) { +class FrontPageRoutes(embeddedUrlBase: String) { private def index = getFromResource("public/index.html") @@ -24,10 +24,6 @@ class FrontPageRoutes(production: Boolean) { val id = "id-" + Base64UUID.create - val embeddedUrlBase = - if (production) "https://scastie.scala-lang.org" - else "http://localhost:9000" - s"""|document.write(" |
| From cff758b460757f3d5f22885e88be451aebc298c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 18:18:59 +0700 Subject: [PATCH 37/42] sbt: dockerCompose task --- build.sbt | 1 + project/Deployment.scala | 18 ++++++++++++------ 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/build.sbt b/build.sbt index d3c66a7aa..bfaa18b6f 100644 --- a/build.sbt +++ b/build.sbt @@ -10,6 +10,7 @@ addCommandAlias("startAllProd", "sbtRunner/reStart;server/fullOptJS/reStart") addCommandAlias("fullBuildServer", "client/Compile/fullOptJS/webpack;server/docker") addCommandAlias("deployLocal", "fullBuildServer;sbtRunner/docker;deployLocalQuick") +addCommandAlias("dockerCompose", "server/docker;sbtRunner/docker;dockerComposeQuick") // Deploy server and sbt instances without building and pushing docker images addCommandAlias("deployQuick", "deployRunnersQuick;deployServerQuick") diff --git a/project/Deployment.scala b/project/Deployment.scala index 92723a29e..2997fa528 100644 --- a/project/Deployment.scala +++ b/project/Deployment.scala @@ -18,14 +18,15 @@ object Deployment { deployRunnersQuick := deployRunnersQuickTask(server, sbtRunner).value, deployServerQuick := deployServerQuickTask(server, sbtRunner).value, deployLocalQuick := deployLocalQuickTask(server, sbtRunner).value, - dockerCompose := dockerComposeTask(server, sbtRunner).value, + dockerComposeQuick := dockerComposeQuick(server, sbtRunner).value, ) lazy val deployRunnersQuick = taskKey[Unit]("Deploy sbt runners") lazy val deployServerQuick = taskKey[Unit]("Deploy server without building server zip") lazy val deployLocalQuick = taskKey[Unit]("Deploy locally") - lazy val dockerCompose = taskKey[Unit]( - "Create docker-compose.yml (alternative way to deploy locally)" + lazy val dockerComposeQuick = taskKey[Unit]( + "Create docker-compose.yml and run `docker-compose down; docker-compose up`" + + " (alternative way to deploy locally)" ) /** @@ -33,12 +34,13 @@ object Deployment { * - Don't mount secrets.conf and local.conf file to container * - Don't set -Dsentry.dsn, -Dconfig.file `docker run` options */ - private def dockerComposeTask(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = + private def dockerComposeQuick(server: Project, sbtRunner: Project): Def.Initialize[Task[Unit]] = Def.task { val log = streams.value.log val baseDir = (ThisBuild / baseDirectory).value + val deployer = Deployer.Local(baseDir, log) - Deployer.Local(baseDir, log).write("docker-compose.yml", + deployer.write("docker-compose.yml", s""" |# https://www.cloudsavvyit.com/10765/how-to-simplify-docker-compose-files-with-yaml-anchors-and-extensions/ |x-runner: &runner @@ -75,7 +77,8 @@ object Deployment { | - -Dakka.remote.artery.canonical.port=5150 |""".stripMargin) - log.info("Created docker-compose.yml. `docker-compose up` to start scastie.") + deployer.run("docker-compose down") + deployer.run("docker-compose up") } /** @param rmi if true then delete all $label images @@ -387,6 +390,9 @@ object Deployment { write(scriptName, scriptContent, executable = true) (rootDir / scriptName).absolutePath ! processLog(scriptName) } + + def run(command: String): Unit = + Process(command, rootDir) ! 
processLog(command.trim.takeWhile(_ != ' ')) } case class Remote(host: String, user: String, log: Logger, tempFile: File) extends Deployer { From e4f4266525e8a3cc09e244326c43e8bb5dda698d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 18:19:21 +0700 Subject: [PATCH 38/42] .gitignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 328af417e..f536bf829 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,5 @@ bin/.coursier bin/drone local/ metals.sbt -.bsp/ \ No newline at end of file +.bsp/ +/docker-compose.yml From 6435fb65d1f99ac9960488f5675ecd6fe5e6c5c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Fri, 13 Aug 2021 15:09:08 +0700 Subject: [PATCH 39/42] Update CONTRIBUTING.md & remove unused scripts --- CONTRIBUTING.md | 39 +++++++++++++++++++++------------------ deployment/test.sh | 16 ---------------- run.sh | 21 --------------------- 3 files changed, 21 insertions(+), 55 deletions(-) delete mode 100644 deployment/test.sh delete mode 100644 run.sh diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2749d7603..f1b1d2909 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,24 +7,24 @@ You are more than welcome to contribute any PR regardless if it's listed or not. ### How to install prerequisites via nix -``` +```shell curl https://nixos.org/nix/install | sh nix-shell -A scastie ``` ### How to install prerequisites on Mac -``` +```shell brew install openjdk sbt nodejs yarn ``` ### How to install prerequisites on Windows Assuming you use Git for Windows >= 2.16.2.1 (note this will erase uncommitted changes): -``` +```shell git config --add core.symlinks true git reset --hard HEAD ``` -``` +```shell choco install nvm yarn sbt jdk8 python3 nvm install 8.9.1 nvm use 8.9.1 @@ -107,7 +107,7 @@ If you have any questions join us in the [gitter channel](https://gitter.im/scal ## Quick -``` +```shell ssh scastie@alaska.epfl.ch ssh scastie@scastie.scala-lang.org ssh scastie@scastie-sbt.scala-lang.org @@ -122,10 +122,10 @@ deploy ``` ## Check logs -``` +```shell ssh scastie@alaska.epfl.ch ssh scastie@scastie.scala-lang.org -tail -F -n1000 output.log +docker logs -f scastie-server ssh scastie@scastie-sbt.scala-lang.org ~/log.sh ``` @@ -162,7 +162,7 @@ These people have access: In case anything goes wrong: -``` +```shell ssh scastie@alaska.epfl.ch ssh scastie@scastie.scala-lang.org ssh scastie@scastie-sbt.scala-lang.org @@ -172,17 +172,20 @@ exit ``` # Running with docker locally - +There are 2 options: +1. Using [docker-compose](https://docs.docker.com/compose/install/) +```shell +sbt dockerCompose ``` -git commit -sbt "sbtRunner/docker" +`dockerCompose` task will build scastie docker images and create `docker-compose.yml` +and run `docker-compose down;docker-compose up`. +See `dockerCompose` alias defined in `build.sbt` for more info. -docker run \ - --network=host \ - -e RUNNER_PORT=5150 \ - -e RUNNER_HOSTNAME=127.0.0.1 \ - -e RUNNER_RECONNECT=false \ - -e RUNNER_PRODUCTION=true \ - scalacenter/scastie-sbt-runner:`git rev-parse --verify HEAD` +2. Let `sbt` run `docker` commands directly instead of using `docker-compose` +```shell +sbt deployLocal ``` +`deployLocal` task will build scastie docker images and deploy deployment files into `local` folder +and run the `*.sh` file in that folder. +See `deployLocal` alias defined in `build.sbt` for more info. 
diff --git a/deployment/test.sh b/deployment/test.sh
deleted file mode 100644
index cc1ba4af3..000000000
--- a/deployment/test.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-
-VERSION=0.26.0
-HASH=7eff4af3d10a9c966e1988257f5d00c4600b647f
-FULL="$VERSION+$HASH"
-
-unzip server/target/universal/server-$FULL.zip
-server-$FULL/bin/server
-
-docker run \
-  --network=host \
-  -e RUNNER_PORT=5150 \
-  -e RUNNER_HOSTNAME=localhost \
-  -e RUNNER_RECONNECT=false \
-  -e RUNNER_PRODUCTION=true \
-  scalacenter/scastie-sbt-runner:$HASH
diff --git a/run.sh b/run.sh
deleted file mode 100644
index 0f79143de..000000000
--- a/run.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-docker run \
-  --network=host \
-  -d \
-  -e RUNNER_PRODUCTION=true \
-  -e RUNNER_PORT=5150 \
-  -e SERVER_HOSTNAME=127.0.0.1 \
-  -e SERVER_AKKA_PORT=15000 \
-  -e RUNNER_HOSTNAME=127.0.0.1 \
-  scalacenter/scastie-sbt-runner:9f373af33ffd1d479f777e2e7d7f1b020c1eaf32
-
-docker run \
-  --network=host \
-  -d \
-  -e RUNNER_PRODUCTION=true \
-  -e RUNNER_PORT=5151 \
-  -e SERVER_HOSTNAME=127.0.0.1 \
-  -e SERVER_AKKA_PORT=15000 \
-  -e RUNNER_HOSTNAME=127.0.0.1 \
-  scalacenter/scastie-sbt-runner:9f373af33ffd1d479f777e2e7d7f1b020c1eaf32
-
-docker ps | grep scastie-sbt-runner | awk '{print $1}' | xargs docker stop
From 4cf0d9e890d194c0e07266f21e5662915a3f9f3f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?=
Date: Mon, 16 Aug 2021 16:16:31 +0700
Subject: [PATCH 40/42] Remove an unused folder server/src/main/universal

---
 server/src/main/universal/conf/application.conf | 3 ---
 1 file changed, 3 deletions(-)
 delete mode 100644 server/src/main/universal/conf/application.conf

diff --git a/server/src/main/universal/conf/application.conf b/server/src/main/universal/conf/application.conf
deleted file mode 100644
index 1b18f23e1..000000000
--- a/server/src/main/universal/conf/application.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-com.olegych.scastie.web {
-  server.production = true
-}
From 0e2caf73d5f665dda89f219a91b174a598209f79 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?=
Date: Wed, 18 Aug 2021 00:48:44 +0700
Subject: [PATCH 41/42] Do not flood the console log when asking SnippetProgress

+ Exception `e` is always an `AskTimeoutException`
+ The `scalaJsContent` and `scalaJsSourceMapContent` may be very long Strings!
--- .../scala/com.olegych.scastie.api/SnippetProgress.scala | 7 +++++++ .../main/scala/com.olegych.scastie.sbt/SbtProcess.scala | 3 +-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala b/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala index 7408d61d8..0f35f61f0 100644 --- a/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala +++ b/api/src/main/scala/com.olegych.scastie.api/SnippetProgress.scala @@ -46,4 +46,11 @@ case class SnippetProgress( def isFailure: Boolean = isTimeout || isSbtError || runtimeError.nonEmpty || compilationInfos.exists(_.severity == Error) override def toString: String = Json.toJsObject(this).toString() + + def logMsg: String = Json.toJsObject( + copy( + scalaJsContent = this.scalaJsContent.map(_ => "..."), + scalaJsSourceMapContent = this.scalaJsSourceMapContent.map(_ => "...") + ) + ).toString() } diff --git a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala index 4d4692242..b6c537f20 100644 --- a/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala +++ b/sbt-runner/src/main/scala/com.olegych.scastie.sbt/SbtProcess.scala @@ -90,8 +90,7 @@ class SbtProcess private ( implicit val sc = context.system.scheduler run.snippetActor.ask(SnippetProgressAsk(_, p)) .recover { - case e => - safeLog.error(s"error while saving progress $p", e) + case e => safeLog.error(s"error while saving progress ${p.logMsg}") } } From 459088c8fbb25fa353b4b4d5d24ef4006f66a3eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bu=CC=80i=20Vie=CC=A3=CC=82t=20Tha=CC=80nh?= Date: Mon, 16 Aug 2021 15:08:29 +0700 Subject: [PATCH 42/42] Use vagrant to test production deployment --- .gitignore | 1 + CONTRIBUTING.md | 3 ++ Vagrantfile | 127 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 131 insertions(+) create mode 100644 Vagrantfile diff --git a/.gitignore b/.gitignore index f536bf829..451a7c118 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ local/ metals.sbt .bsp/ /docker-compose.yml +/.vagrant diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f1b1d2909..ed9e3e0aa 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -171,6 +171,9 @@ exit ./server.sh ``` +# Run/test `deploy` task on development machine using vagrant & virtualbox +See guide in [Vagrantfile] + # Running with docker locally There are 2 options: 1. Using [docker-compose](https://docs.docker.com/compose/install/) diff --git a/Vagrantfile b/Vagrantfile new file mode 100644 index 000000000..2045c7cc6 --- /dev/null +++ b/Vagrantfile @@ -0,0 +1,127 @@ +# This `Vagrantfile` is used to test/run sbt deploy task on development machine +# using deployment/production.conf to ensure everything is ok before deploying to production env. +# Steps: +# 0. Install vagrant and virtualbox +# 1. `vagrant up` in this `scastie` directory to create & start 2 ubuntu VMs +# which will be used to run sbt-runners and server (and mongodb) +# 2. You should (optional) use static LAN IP. In this guide, we use `192.168.86.147` +# 3. Run [docker registry](https://docs.docker.com/registry/) locally +# `docker run -d -p 5000:5000 --name registry registry:2` +# 4. Add `"insecure-registries": ["192.168.86.147:5000"]` into your +# [docker daemon config](https://docs.docker.com/config/daemon/#configure-the-docker-daemon) +# Then restart your docker daemon +# 5. 
Change `ImageName` in `docker / imageNames` in project/DockerHelper.scala, add:
+#    registry = Some("192.168.86.147:5000")
+# 6. Add to your ~/.ssh/config:
+#    Host scastie-sbt.scala-lang.org
+#      HostName 192.168.33.10
+#      User scastie
+#      IdentityFile ~/.vagrant.d/insecure_private_key
+#      CheckHostIP no
+#      StrictHostKeyChecking no
+#      PasswordAuthentication no
+#      IdentitiesOnly yes
+#      UserKnownHostsFile /dev/null
+#    Host scastie.scala-lang.org
+#      HostName 192.168.33.12
+#      ... same as above
+#
+# Note: 192.168.33.10 and 192.168.33.12 are the IPs of the `runner` and `server` VMs as defined below.
+#
+# 7. Confirm that you can ssh to the VMs (and not the actual production servers):
+#    ```
+#    ssh scastie.scala-lang.org 'ip -4 -brief addr | grep 192.168'
+#    ssh scastie-sbt.scala-lang.org 'ip -4 -brief addr | grep 192.168'
+#    ```
+#    The output must not be empty and must contain `192.168.33.10`, `192.168.33.12`
+#
+# 8. Change deployment/production.conf: sbt-runners.ports-size = 2
+# 9. If you don't have access to `github.com/scalacenter/scastie-secrets`:
+#    Change `secretsFile` to `SecretsFile.local(..)` in
+#    the deployRunnersQuick and deployServerQuick taskDefs in project/Deployment.scala
+# 10. Run `sbt deploy` to deploy scastie to the VMs
+#     To speed up the redeployment process, see:
+#     + The `deploy` command alias defined in build.sbt
+#     + The comment about `addInstructions` in project/DockerHelper.runnerDockerfile
+# 11. You should revert steps 6 and 8 after you are done testing/deploying with vagrant
+#
+Vagrant.configure("2") do |config|
+  # config.ssh.username = "scastie"
+  config.vm.box = "ubuntu/focal64"
+
+  # disable box_check_update and synced_folder to speed up
+  config.vm.box_check_update = false
+  config.vm.synced_folder ".", "/vagrant", disabled: true
+  config.ssh.insert_key = false
+
+  config.vm.provision "shell", inline: <<-SHELL
+    # Install docker
+    apt-get update
+    apt-get install -y apt-transport-https curl
+    curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+    echo \
+      "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
+      $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
+    apt-get update
+    apt-get install -y docker-ce docker-ce-cli containerd.io
+
+    # Add user `scastie` with group `scastie` and supplementary groups `docker`
+    groupadd -g 433 scastie
+    useradd scastie --uid 433 --gid 433 --create-home --shell /bin/bash --groups docker
+
+    # Set up ssh and sudo for user `scastie`
+    cp -pr /home/vagrant/.ssh /home/scastie/
+    chown -R scastie:scastie /home/scastie/.ssh
+    echo "%scastie ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/scastie
+
+    # add my own registry
+    echo '{ "insecure-registries": ["192.168.86.147:5000"] }' > /etc/docker/daemon.json
+    service docker restart
+
+    echo "192.168.33.10 scastie-sbt.scala-lang.org" >> /etc/hosts
+    echo "192.168.33.12 scastie.scala-lang.org" >> /etc/hosts
+  SHELL
+
+  config.vm.define "runner" do |c|
+    c.vm.network "private_network", ip: "192.168.33.10"
+    c.vm.hostname = "runner"
+    c.vm.provider "virtualbox" do |vb|
+      vb.cpus = 2
+      vb.memory = "2560"
+    end
+  end
+
+  config.vm.define "server" do |c|
+    c.vm.network "private_network", ip: "192.168.33.12"
+    c.vm.hostname = "server"
+    c.vm.provider "virtualbox" do |vb|
+      vb.cpus = 2
+      vb.memory = "2560"
+    end
+
+    # upload to /home/vagrant/.ssh/
+    c.vm.provision "file",
+      source: "~/.vagrant.d/insecure_private_key",
+      destination: "~/.ssh/insecure_private_key"
+
+    
c.vm.provision "shell", inline: <<-SHELL + # config ssh so scastie on server can ssh runner + echo "Host scastie-sbt.scala-lang.org + IdentityFile ~/.ssh/insecure_private_key + CheckHostIP no + StrictHostKeyChecking no + PasswordAuthentication no + IdentitiesOnly yes + UserKnownHostsFile /dev/null" > /home/scastie/.ssh/config + chmod 600 /home/scastie/.ssh/config + + install -m 600 \ + /home/vagrant/.ssh/insecure_private_key \ + /home/scastie/.ssh/insecure_private_key + chown -R scastie:scastie /home/scastie/.ssh + + # run mongodb on `server` + docker run -d --restart=always --name mongo --network=host -v /opt/mongo_data:/data/db mongo + SHELL + end +end