Skip to content

Commit

Permalink
chore: fix remaining build errors (#2250)
Browse files Browse the repository at this point in the history
* chore: fix remaining build errors

* chore: fix remaining build errors

* chore: fix remaining build errors

* chore: fix remaining build errors
  • Loading branch information
mhamilton723 authored Jul 17, 2024
1 parent a702985 commit 33180ef
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 17 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ object RESTHelpers {
}

def safeSend(request: HttpRequestBase,
backoffs: List[Int] = List(100, 500, 1000), //scalastyle:ignore magic.number
backoffs: List[Int] = List(100, 500, 1000), //scalastyle:ignore magic.number
expectedCodes: Set[Int] = Set(),
close: Boolean = true): CloseableHttpResponse = {

Expand Down Expand Up @@ -92,8 +92,11 @@ object RESTHelpers {
IOUtils.toString(result.getEntity.getContent, "utf-8")
}

def sendAndParseJson(request: HttpRequestBase, expectedCodes: Set[Int]=Set()): JsValue = {
val response = safeSend(request, expectedCodes=expectedCodes, close=false)
def sendAndParseJson(request: HttpRequestBase,
expectedCodes: Set[Int] = Set(),
backoffs: List[Int] = List(100, 500, 1000) //scalastyle:ignore magic.number
): JsValue = {
val response = safeSend(request, expectedCodes = expectedCodes, close = false, backoffs = backoffs)
val output = parseResult(response).parseJson
response.close()
output
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import scala.collection.immutable.Map
import scala.collection.mutable
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future, blocking}
import scala.util.Random

object DatabricksUtilities {

Expand Down Expand Up @@ -116,7 +117,10 @@ object DatabricksUtilities {
/** Issues an authenticated GET against the Databricks REST API and parses the
  * response body as JSON.
  *
  * NOTE(review): this span is a flattened unified diff from a web page, so it
  * contains BOTH the removed old call and the added new call — the bare
  * `RESTHelpers.sendAndParseJson(request)` line below is the pre-change line
  * and would not exist in the real post-commit source. Do not treat this block
  * as compilable as-is.
  *
  * @param path       API path appended to the versioned base URL (e.g. "/clusters/list")
  * @param apiVersion Databricks REST API version segment; defaults to "2.0"
  * @return the parsed JSON response body
  */
def databricksGet(path: String, apiVersion: String = "2.0"): JsValue = {
// Build the request URL from the versioned base; baseURL/AuthValue are
// defined elsewhere in object DatabricksUtilities (outside this view).
val request = new HttpGet(baseURL(apiVersion) + path)
request.addHeader("Authorization", AuthValue)
// Diff artifact: old (removed) call without explicit backoffs — see note above.
RESTHelpers.sendAndParseJson(request)
val random = new Random() // Use a jittered retry to avoid overwhelming
// New call: three jittered backoffs of 1000-1999 ms each, replacing the
// fixed List(100, 500, 1000) default used by sendAndParseJson.
RESTHelpers.sendAndParseJson(request, backoffs = List.fill(3) {
1000 + random.nextInt(1000)
})
}

//TODO convert all this to typed code
Expand Down Expand Up @@ -332,7 +336,8 @@ object DatabricksUtilities {
lifeCycleState = lcs
if (logLevel >= 2) println(s"Job $runId state: $lifeCycleState")
blocking {
Thread.sleep(interval.toLong)
val random = new Random() // Use a jittered retry to avoid overwhelming
Thread.sleep(interval.toLong + random.nextInt(1000))
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,6 @@
" spark.read.format(\"binaryFile\")\n",
" .option(\"pathGlobFilter\", \"*.jpg\")\n",
" .load(folder_path + \"/train\")\n",
" .sample(0.5) # For demo purposes\n",
" .withColumn(\"image\", regexp_replace(\"path\", \"dbfs:\", \"/dbfs\"))\n",
" .withColumn(\"label\", assign_label_udf(col(\"path\")))\n",
" .select(\"image\", \"label\")\n",
Expand Down
20 changes: 9 additions & 11 deletions pipeline.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -116,19 +116,17 @@ jobs:
PGP-PUBLIC: $(pgp-public)
PGP-PW: $(pgp-pw)
SYNAPSEML_ENABLE_PUBLISH: true
- bash: |
set -e
sbt publishBadges
- task: AzureCLI@2
inputs:
azureSubscription: 'SynapseML Build'
scriptLocation: inlineScript
scriptType: bash
inlineScript: |
set -e
sbt publishBadges
condition: and(succeeded(), eq(variables.isMaster, true))
displayName: Publish Badges
env:
STORAGE-KEY: $(storage-key)
NEXUS-UN: $(nexus-un)
NEXUS-PW: $(nexus-pw)
PGP-PRIVATE: $(pgp-private)
PGP-PUBLIC: $(pgp-public)
PGP-PW: $(pgp-pw)
SYNAPSEML_ENABLE_PUBLISH: true


- job: E2E
timeoutInMinutes: 120
Expand Down

0 comments on commit 33180ef

Please sign in to comment.