
Commit

Switch to Scala 2.12.7 (apache#4062)
chetanmeh authored and csantanapr committed Nov 6, 2018
1 parent cc07120 commit 4dc2de9
Showing 24 changed files with 69 additions and 84 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -21,7 +21,7 @@ group: deprecated-2017Q3

language: scala
scala:
-- 2.11.8
+- 2.12.7

services:
- docker
28 changes: 14 additions & 14 deletions common/scala/build.gradle
@@ -32,18 +32,18 @@ repositories {
dependencies {
compile "org.scala-lang:scala-library:${gradle.scala.version}"

-compile 'com.github.pureconfig:pureconfig_2.11:0.9.0'
-compile 'io.spray:spray-json_2.11:1.3.4'
-compile 'com.lihaoyi:fastparse_2.11:1.0.0'
+compile 'com.github.pureconfig:pureconfig_2.12:0.9.0'
+compile 'io.spray:spray-json_2.12:1.3.4'
+compile 'com.lihaoyi:fastparse_2.12:1.0.0'

-compile 'com.typesafe.akka:akka-actor_2.11:2.5.12'
-compile 'com.typesafe.akka:akka-stream_2.11:2.5.12'
-compile 'com.typesafe.akka:akka-slf4j_2.11:2.5.12'
+compile 'com.typesafe.akka:akka-actor_2.12:2.5.12'
+compile 'com.typesafe.akka:akka-stream_2.12:2.5.12'
+compile 'com.typesafe.akka:akka-slf4j_2.12:2.5.12'

-compile 'com.typesafe.akka:akka-http-core_2.11:10.1.1'
-compile 'com.typesafe.akka:akka-http-spray-json_2.11:10.1.1'
+compile 'com.typesafe.akka:akka-http-core_2.12:10.1.1'
+compile 'com.typesafe.akka:akka-http-spray-json_2.12:10.1.1'

-compile 'com.lightbend.akka:akka-stream-alpakka-file_2.11:0.15'
+compile 'com.lightbend.akka:akka-stream-alpakka-file_2.12:0.15'

compile 'ch.qos.logback:logback-classic:1.2.3'
compile 'org.slf4j:jcl-over-slf4j:1.7.25'
@@ -61,10 +61,10 @@ dependencies {
compile 'com.github.ben-manes.caffeine:caffeine:2.6.2'
compile 'com.google.code.findbugs:jsr305:3.0.2'
compile 'io.fabric8:kubernetes-client:4.0.3'
-compile 'io.kamon:kamon-core_2.11:0.6.7'
-compile 'io.kamon:kamon-statsd_2.11:0.6.7'
+compile 'io.kamon:kamon-core_2.12:0.6.7'
+compile 'io.kamon:kamon-statsd_2.12:0.6.7'
//for mesos
-compile 'com.adobe.api.platform.runtime:mesos-actor:0.0.8'
+compile 'com.adobe.api.platform.runtime:mesos-actor:0.0.8_2.12'

//tracing support
compile 'io.opentracing:opentracing-api:0.31.0'
@@ -73,11 +73,11 @@ dependencies {
compile 'io.zipkin.reporter2:zipkin-sender-okhttp3:2.6.1'
compile 'io.zipkin.reporter2:zipkin-reporter:2.6.1'

-compile 'io.reactivex:rxscala_2.11:0.26.5'
+compile 'io.reactivex:rxscala_2.12:0.26.5'
compile 'io.reactivex:rxjava-reactive-streams:1.2.1'
compile 'com.microsoft.azure:azure-cosmosdb:2.1.0'

-compile ('com.lightbend.akka:akka-stream-alpakka-s3_2.11:0.19') {
+compile ('com.lightbend.akka:akka-stream-alpakka-s3_2.12:0.19') {
exclude group: 'commons-logging'
exclude group: 'org.apache.httpcomponents' //Not used as alpakka uses akka-http
exclude group: 'com.fasterxml.jackson.core'
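Note: every _2.11 artifact suffix above moves to _2.12 in lockstep, because Scala libraries are binary-compatible only within a single 2.x line and a classpath that mixes the two suffixes fails at runtime. A minimal sanity check, as an illustrative sketch only (the object name is invented, not part of this commit):

object ScalaBinaryVersionCheck extends App {
  // versionNumberString reports the scala-library version, e.g. "2.12.7";
  // the first two segments form the binary-compatibility line.
  val binary = scala.util.Properties.versionNumberString.split('.').take(2).mkString(".")
  require(binary == "2.12", s"expected Scala binary version 2.12, found $binary")
  println(s"scala-library binary version OK: $binary")
}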
1 change: 1 addition & 0 deletions common/scala/src/main/scala/whisk/common/Logging.scala
@@ -111,6 +111,7 @@ class PrintStreamLogging(outputStream: PrintStream = Console.out) extends Loggin
case InfoLevel => "INFO"
case WarningLevel => "WARN"
case ErrorLevel => "ERROR"
+case LogLevel(_) => "UNKNOWN"
}

val logMessage = Seq(message).collect {
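Note: the added wildcard arm looks driven by Scala 2.12's stricter exhaustivity analysis, which the -Xfatal-warnings flag in settings.gradle promotes to a compile error. A reduced sketch of the pattern, assuming LogLevel is a case class over a rank (a simplification of the project's actual definition):

object LogLevelSketch {
  case class LogLevel(rank: Int)
  val InfoLevel = LogLevel(0)
  val WarningLevel = LogLevel(1)
  val ErrorLevel = LogLevel(2)

  // Stable-identifier patterns alone are not provably exhaustive for a
  // case class, so the catch-all LogLevel(_) arm is required.
  def levelName(level: LogLevel): String = level match {
    case InfoLevel    => "INFO"
    case WarningLevel => "WARN"
    case ErrorLevel   => "ERROR"
    case LogLevel(_)  => "UNKNOWN"
  }
}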
@@ -75,8 +75,8 @@ object ActivationMessage extends DefaultJsonProtocol {
* Message that is sent from the invoker to the controller after action is completed or after slot is free again for
* new actions.
*/
-abstract class AcknowledegmentMessage() extends Message {
-override val transid: TransactionId
+abstract class AcknowledegmentMessage(private val tid: TransactionId) extends Message {
+override val transid: TransactionId = tid
override def serialize: String = {
AcknowledegmentMessage.serdes.write(this).compactPrint
}
@@ -90,7 +90,7 @@ case class CompletionMessage(override val transid: TransactionId,
activationId: ActivationId,
isSystemError: Boolean,
invoker: InvokerInstanceId)
-extends AcknowledegmentMessage() {
+extends AcknowledegmentMessage(transid) {

override def toString = {
activationId.asString
@@ -109,7 +109,7 @@ object CompletionMessage extends DefaultJsonProtocol {
* The whisk activation field will have its logs stripped.
*/
case class ResultMessage(override val transid: TransactionId, response: Either[ActivationId, WhiskActivation])
-extends AcknowledegmentMessage() {
+extends AcknowledegmentMessage(transid) {

override def toString = {
response.fold(l => l, r => r.activationId).asString
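Note: AcknowledegmentMessage now receives the transaction id as a constructor argument, and each concrete case class passes its own transid field up, instead of re-declaring the member abstractly. A reduced sketch of the wiring, with String standing in for TransactionId and simplified names:

trait Message { val transid: String }

abstract class Acknowledgement(private val tid: String) extends Message {
  override val transid: String = tid
}

// The case class field both satisfies Message.transid and initializes the
// superclass constructor parameter.
case class Completion(override val transid: String, activationId: String)
  extends Acknowledgement(transid)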
@@ -555,10 +555,10 @@ class CouchDbRestStore[DocumentAbstraction <: DocumentSerializer](dbProtocol: St
}

private def reportFailure[T, U](f: Future[T], onFailure: Throwable => U): Future[T] = {
-f.onFailure({
+f.failed.foreach {
case _: ArtifactStoreException => // These failures are intentional and shouldn't trigger the catcher.
case x => onFailure(x)
-})
+}
f
}
}
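Note: Future.onFailure and Future.onSuccess are deprecated as of Scala 2.12, and -Xfatal-warnings turns the deprecation into an error. f.failed.foreach is the failure-only replacement: f.failed is a Future[Throwable] that succeeds exactly when f fails. A self-contained sketch of the equivalence (riskyWork and the handler are invented for illustration):

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object FailureCallbackSketch extends App {
  def riskyWork(): Int = throw new IllegalStateException("intentional")

  val f: Future[Int] = Future(riskyWork())

  // 2.11 style, now deprecated: f.onFailure({ case x => ... })
  f.failed.foreach {
    case _: IllegalStateException => // intentional failure, deliberately ignored
    case x                        => println(s"unexpected failure: $x")
  }

  Thread.sleep(500) // crude wait so the async callback can fire in this demo
}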
@@ -38,11 +38,11 @@ private[database] object StoreUtils {
implicit transid: TransactionId,
logging: Logging,
ec: ExecutionContext): Future[T] = {
-f.onFailure({
+f.failed.foreach {
case _: ArtifactStoreException => // These failures are intentional and shouldn't trigger the catcher.
case x =>
transid.failed(this, start, s"${failureMessage(x)} [${x.getClass.getSimpleName}]", ErrorLevel)
-})
+}
f
}

@@ -238,9 +238,9 @@ class CosmosDBArtifactStore[DocumentAbstraction <: DocumentSerializer](protected
.runWith(Sink.seq)
.map(_.toList)

-f.onSuccess({
-case out => transid.finished(this, start, s"[QUERY] '$collName' completed: matched ${out.size}")
-})
+f.foreach { out =>
+transid.finished(this, start, s"[QUERY] '$collName' completed: matched ${out.size}")
+}
reportFailure(f, start, failure => s"[QUERY] '$collName' internal error, failure: '${failure.getMessage}'")
}

@@ -36,7 +36,7 @@ import whisk.http.Messages
import scala.collection.concurrent.TrieMap
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag
-import scala.util.Try
+import scala.util.{Failure, Success, Try}

object MemoryArtifactStoreProvider extends ArtifactStoreProvider {
override def makeStore[D <: DocumentSerializer: ClassTag](useBatching: Boolean)(
@@ -130,15 +130,12 @@ class MemoryArtifactStore[DocumentAbstraction <: DocumentSerializer](dbName: Str
}

val f = Future.fromTry(t)
-
-f.onFailure({
-case _: DocumentConflictException =>
+f.onComplete {
+case Success(_) => transid.finished(this, start, s"[PUT] '$dbName' completed document: '$docinfoStr'")
+case Failure(_: DocumentConflictException) =>
transid.finished(this, start, s"[PUT] '$dbName', document: '$docinfoStr'; conflict.")
-})
-
-f.onSuccess({
-case _ => transid.finished(this, start, s"[PUT] '$dbName' completed document: '$docinfoStr'")
-})
+case Failure(_) =>
+}

reportFailure(f, start, failure => s"[PUT] '$dbName' internal error, failure: '${failure.getMessage}'")
}
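Note: where the old code registered separate onSuccess and onFailure callbacks, the replacement folds both into a single onComplete over Try, which also forces an explicit decision about other failures (the empty case Failure(_) arm). A compact sketch of the shape, with a stand-in exception type and println in place of the transaction log:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

object PutCallbackSketch extends App {
  class DocumentConflictException extends RuntimeException // stand-in type

  val f: Future[String] = Future("doc-1")

  f.onComplete {
    case Success(id)                           => println(s"[PUT] completed document: '$id'")
    case Failure(_: DocumentConflictException) => println("[PUT] conflict.")
    case Failure(_)                            => // left to reportFailure elsewhere
  }

  Thread.sleep(500) // crude wait for the async callback in this demo
}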
@@ -239,9 +236,7 @@ class MemoryArtifactStore[DocumentAbstraction <: DocumentSerializer](dbName: Str
}.toList

val f = Future.sequence(r).map(_.flatten)
-f.onSuccess({
-case _ => transid.finished(this, start, s"[QUERY] '$dbName' completed: matched ${out.size}")
-})
+f.foreach(_ => transid.finished(this, start, s"[QUERY] '$dbName' completed: matched ${out.size}"))
reportFailure(f, start, failure => s"[QUERY] '$dbName' internal error, failure: '${failure.getMessage}'")

}
@@ -270,10 +265,8 @@ class MemoryArtifactStore[DocumentAbstraction <: DocumentSerializer](dbName: Str
} else {
val storedName = attachmentUri.path.toString()
val f = attachmentStore.readAttachment(doc.id, storedName, sink)
-f.onSuccess {
-case _ =>
-transid.finished(this, start, s"[ATT_GET] '$dbName' completed: found attachment '$name' of document '$doc'")
-}
+f.foreach(_ =>
+transid.finished(this, start, s"[ATT_GET] '$dbName' completed: found attachment '$name' of document '$doc'"))
f
}
}
@@ -87,11 +87,9 @@ class S3AttachmentStore(client: S3Client, bucket: String, prefix: String)(implic
.runWith(combinedSink(client.multipartUpload(bucket, objectKey(docId, name), contentType)))
.map(r => AttachResult(r.digest, r.length))

-f.onSuccess({
-case _ =>
-transid
-.finished(this, start, s"[ATT_PUT] '$prefix' completed uploading attachment '$name' of document 'id: $docId'")
-})
+f.foreach(_ =>
+transid
+.finished(this, start, s"[ATT_PUT] '$prefix' completed uploading attachment '$name' of document 'id: $docId'"))

reportFailure(
f,
@@ -147,10 +145,8 @@ class S3AttachmentStore(client: S3Client, bucket: String, prefix: String)(implic
.runWith(Sink.seq)
.map(_ => true)

-f.onSuccess {
-case _ =>
-transid.finished(this, start, s"[ATTS_DELETE] completed: deleting attachments of document 'id: $docId'")
-}
+f.foreach(_ =>
+transid.finished(this, start, s"[ATTS_DELETE] completed: deleting attachments of document 'id: $docId'"))

reportFailure(
f,
@@ -167,10 +163,8 @@ class S3AttachmentStore(client: S3Client, bucket: String, prefix: String)(implic
.deleteObject(bucket, objectKey(docId, name))
.map(_ => true)

-f.onSuccess {
-case _ =>
-transid.finished(this, start, s"[ATT_DELETE] completed: deleting attachment '$name' of document 'id: $docId'")
-}
+f.foreach(_ =>
+transid.finished(this, start, s"[ATT_DELETE] completed: deleting attachment '$name' of document 'id: $docId'"))

reportFailure(
f,
2 changes: 1 addition & 1 deletion common/scala/src/main/scala/whisk/core/entity/Limits.scala
@@ -82,5 +82,5 @@ protected[core] object ActionLimits extends ArgNormalizer[ActionLimits] with Def

protected[core] object TriggerLimits extends ArgNormalizer[TriggerLimits] with DefaultJsonProtocol {

-override protected[core] implicit val serdes = jsonFormat0(TriggerLimits.apply)
+override protected[core] implicit val serdes = jsonFormat0(TriggerLimits.apply _)
}
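Note: Scala 2.12 deprecates automatic eta-expansion of zero-argument methods, which -Xfatal-warnings again makes fatal; the trailing underscore requests the expansion explicitly. A sketch with a stand-in signature (the real spray-json jsonFormat0 returns a RootJsonFormat[T], not a String):

object EtaExpansionSketch extends App {
  case class TriggerLimits()

  // Stand-in for spray-json's jsonFormat0, which takes a () => T factory.
  def jsonFormat0[T](construct: () => T): String = construct().toString

  // TriggerLimits.apply _ eta-expands the nullary method into a Function0.
  val serdes = jsonFormat0(TriggerLimits.apply _)
  println(serdes) // prints "TriggerLimits()"
}

The MesosContainerFactory default argument below (taskIdGenerator _) gets the same fix.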
@@ -76,7 +76,7 @@ class MesosContainerFactory(config: WhiskConfig,
loadConfigOrThrow[ContainerArgsConfig](ConfigKeys.containerArgs),
mesosConfig: MesosConfig = loadConfigOrThrow[MesosConfig](ConfigKeys.mesos),
clientFactory: (ActorSystem, MesosConfig) => ActorRef = MesosContainerFactory.createClient,
-taskIdGenerator: () => String = MesosContainerFactory.taskIdGenerator)
+taskIdGenerator: () => String = MesosContainerFactory.taskIdGenerator _)
extends ContainerFactory {

val subscribeTimeout = 10.seconds
6 changes: 3 additions & 3 deletions core/controller/build.gradle
@@ -39,9 +39,9 @@ repositories {

dependencies {
compile "org.scala-lang:scala-library:${gradle.scala.version}"
-compile 'com.lightbend.akka.management:akka-management-cluster-bootstrap_2.11:0.11.0'
-compile 'com.lightbend.akka.discovery:akka-discovery-kubernetes-api_2.11:0.11.0'
-compile 'com.lightbend.akka.discovery:akka-discovery-marathon-api_2.11:0.11.0'
+compile 'com.lightbend.akka.management:akka-management-cluster-bootstrap_2.12:0.11.0'
+compile 'com.lightbend.akka.discovery:akka-discovery-kubernetes-api_2.12:0.11.0'
+compile 'com.lightbend.akka.discovery:akka-discovery-marathon-api_2.12:0.11.0'
compile project(':common:scala')
scoverage gradle.scoverage.deps
}
@@ -53,7 +53,7 @@ object BasicAuthenticationDirective extends AuthenticationDirectiveProvider {
logging.debug(this, s"authentication not valid")
None
}
-future onFailure { case t => logging.error(this, s"authentication error: $t") }
+future.failed.foreach(t => logging.error(this, s"authentication error: $t"))
future
}.toOption
} getOrElse {
6 changes: 3 additions & 3 deletions settings.gradle
@@ -33,7 +33,7 @@ include 'tools:admin'
rootProject.name = 'openwhisk'

gradle.ext.scala = [
-version: '2.11.11',
+version: '2.12.7',
compileFlags: ['-feature', '-unchecked', '-deprecation', '-Xfatal-warnings', '-Ywarn-unused-import']
]

@@ -44,8 +44,8 @@ gradle.ext.scalafmt = [

gradle.ext.scoverage = [
deps: [
-'org.scoverage:scalac-scoverage-plugin_2.11:1.3.1',
-'org.scoverage:scalac-scoverage-runtime_2.11:1.3.1'
+'org.scoverage:scalac-scoverage-plugin_2.12:1.3.1',
+'org.scoverage:scalac-scoverage-runtime_2.12:1.3.1'
]
]

8 changes: 4 additions & 4 deletions tests/build.gradle
@@ -152,11 +152,11 @@ dependencies {
compile 'org.apache.httpcomponents:httpmime:4.3.6'
compile 'junit:junit:4.11'
compile 'com.jayway.restassured:rest-assured:2.6.0'
-compile 'org.scalatest:scalatest_2.11:3.0.1'
-compile 'com.typesafe.akka:akka-testkit_2.11:2.5.12'
+compile 'org.scalatest:scalatest_2.12:3.0.1'
+compile 'com.typesafe.akka:akka-testkit_2.12:2.5.12'
compile 'com.google.code.gson:gson:2.3.1'
-compile 'org.scalamock:scalamock-scalatest-support_2.11:3.4.2'
-compile 'com.typesafe.akka:akka-http-testkit_2.11:10.1.1'
+compile 'org.scalamock:scalamock-scalatest-support_2.12:3.4.2'
+compile 'com.typesafe.akka:akka-http-testkit_2.12:10.1.1'
compile 'com.github.java-json-tools:json-schema-validator:2.2.8'
compile "org.mockito:mockito-core:2.15.0"
compile 'io.opentracing:opentracing-mock:0.31.0'
@@ -240,7 +240,6 @@ object ActionContainer {
}
private def concurrentSyncPost(host: String, port: Int, endPoint: String, contents: Seq[JsValue])(
implicit logging: Logging,
-ec: ExecutionContext,
as: ActorSystem): Seq[(Int, Option[JsObject])] = {

implicit val transid = TransactionId.testing
4 changes: 2 additions & 2 deletions tests/src/test/scala/common/RunCliCmd.scala
@@ -19,7 +19,7 @@ package common

import java.io.File

-import scala.collection.JavaConversions.mapAsJavaMap
+import scala.collection.JavaConverters._
import scala.collection.mutable.Buffer
import org.scalatest.Matchers
import TestUtils._
@@ -48,7 +48,7 @@ trait RunCliCmd extends Matchers {
env: Map[String, String],
fileStdin: Option[File],
params: Seq[String]): RunResult = {
-TestUtils.runCmd(expectedExitCode, dir, TestUtils.logger, env, fileStdin.getOrElse(null), params: _*)
+TestUtils.runCmd(expectedExitCode, dir, TestUtils.logger, env.asJava, fileStdin.getOrElse(null), params: _*)
}

/**
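Note: scala.collection.JavaConversions and its implicit conversions are deprecated in 2.12; JavaConverters makes each Java/Scala collection crossing explicit with .asJava / .asScala, which is why env.asJava now appears at the TestUtils.runCmd call. A minimal sketch of the two directions:

import scala.collection.JavaConverters._

object ConvertersSketch extends App {
  val env: Map[String, String] = Map("WHISK_HOME" -> "/tmp/openwhisk")

  // Scala -> Java: explicit where JavaConversions used to convert silently.
  val javaEnv: java.util.Map[String, String] = env.asJava

  // Java -> Scala: asScala yields a mutable.Map view over the Java map.
  val back: scala.collection.mutable.Map[String, String] = javaEnv.asScala

  println(javaEnv.get("WHISK_HOME"))
  println(back("WHISK_HOME"))
}

The TestConnector changes below (msgs.asScala, msgs.asJava) are the same migration.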
@@ -22,7 +22,7 @@ import java.util.concurrent.LinkedBlockingQueue

import scala.concurrent.Future
import scala.concurrent.duration._
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._

import org.apache.kafka.clients.producer.RecordMetadata
import org.apache.kafka.common.TopicPartition
@@ -41,7 +41,7 @@ class TestConnector(topic: String, override val maxPeek: Int, allowMoreThanMax:
val msgs = new ArrayList[Message]
queue.synchronized {
queue.drainTo(msgs, if (allowMoreThanMax) Int.MaxValue else maxPeek)
-msgs map { m =>
+msgs.asScala map { m =>
offset += 1
(topic, -1, offset, m.serialize.getBytes)
}
@@ -87,7 +87,7 @@ class TestConnector(topic: String, override val maxPeek: Int, allowMoreThanMax:

def sendBulk(topic: String, msgs: Seq[Message]): Future[RecordMetadata] = {
queue.synchronized {
-if (queue.addAll(msgs)) {
+if (queue.addAll(msgs.asJava)) {
logging.info(this, s"put: ${msgs.length} messages")
Future.successful(new RecordMetadata(new TopicPartition(topic, 0), 0, queue.size, -1, Long.box(-1L), -1, -1))
} else {
@@ -25,7 +25,6 @@ import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

-import scala.concurrent.ExecutionContext.Implicits.global
import whisk.core.containerpool.docker._

import scala.concurrent.ExecutionContext